commit_message (string, 3-2.32k chars) | diff (string, 186-49.5k chars) | concern_count (int64, 1-5) | shas (string, 44-220 chars) | types (string, 7-45 chars) |
---|---|---|---|---|
update renovate config
|
["diff --git a/.github/workflows/renovate.yml b/.github/workflows/renovate.yml\nindex 710d1f6..c83d681 100644\n--- a/.github/workflows/renovate.yml\n+++ b/.github/workflows/renovate.yml\n@@ -1,7 +1,7 @@\n name: Renovate\n on:\n schedule:\n- - cron: '0/5 * * * *'\n+ - cron: '0 */8 * * *'\n jobs:\n renovate:\n runs-on: ubuntu-latest\n"]
| 1 |
["6132632f7517195c7333308e9c667cdb756bf0af"]
|
["cicd"]
|
simplify statement
|
["diff --git a/src/Object/Merge.ts b/src/Object/Merge.ts\nindex 1f48efb..06caad1 100644\n--- a/src/Object/Merge.ts\n+++ b/src/Object/Merge.ts\n@@ -96,9 +96,11 @@ type ChooseMergeDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _MergeDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? MergeProp<O, O1, K, OOK, style>\n- : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? MergeProp<O, O1, K, OOK, style>\n+ : LibStyle<ChooseMergeDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\ndiff --git a/src/Object/Patch.ts b/src/Object/Patch.ts\nindex 2d73784..2c8bd42 100644\n--- a/src/Object/Patch.ts\n+++ b/src/Object/Patch.ts\n@@ -89,9 +89,11 @@ type ChoosePatchDeep<OK, O1K, K extends Key, OOK extends Key, style extends Merg\n @hidden\n */\n export type _PatchDeep<O, O1, K extends Key, OOK extends Key, style extends MergeStyle> =\n- Or<Extends<[O], [never]>, Extends<[O1], [never]>> extends 1 // filter never\n+ [O] extends [never]\n ? PatchProp<O, O1, K, OOK>\n- : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n+ : [O1] extends [never]\n+ ? PatchProp<O, O1, K, OOK>\n+ : LibStyle<ChoosePatchDeep<NoList<O>, NoList<O1>, K, OOK, style>, O, O1, style>\n \n /**\n @hidden\n"]
| 1 |
["f86944ff00b970d7e2da48abbff43e58bdf29b99"]
|
["refactor"]
|
add Expr.equals benchmark
|
["diff --git a/ibis/tests/benchmarks/test_benchmarks.py b/ibis/tests/benchmarks/test_benchmarks.py\nindex 78305bb..9c7e6d7 100644\n--- a/ibis/tests/benchmarks/test_benchmarks.py\n+++ b/ibis/tests/benchmarks/test_benchmarks.py\n@@ -1,3 +1,4 @@\n+import copy\n import functools\n import itertools\n import string\n@@ -340,8 +341,9 @@ def test_execute(benchmark, expression_fn, pt):\n benchmark(expr.execute)\n \n \n-def test_repr_tpc_h02(benchmark):\n- part = ibis.table(\n+@pytest.fixture\n+def part():\n+ return ibis.table(\n dict(\n p_partkey=\"int64\",\n p_size=\"int64\",\n@@ -350,7 +352,11 @@ def test_repr_tpc_h02(benchmark):\n ),\n name=\"part\",\n )\n- supplier = ibis.table(\n+\n+\n+@pytest.fixture\n+def supplier():\n+ return ibis.table(\n dict(\n s_suppkey=\"int64\",\n s_nationkey=\"int64\",\n@@ -362,7 +368,11 @@ def test_repr_tpc_h02(benchmark):\n ),\n name=\"supplier\",\n )\n- partsupp = ibis.table(\n+\n+\n+@pytest.fixture\n+def partsupp():\n+ return ibis.table(\n dict(\n ps_partkey=\"int64\",\n ps_suppkey=\"int64\",\n@@ -370,14 +380,25 @@ def test_repr_tpc_h02(benchmark):\n ),\n name=\"partsupp\",\n )\n- nation = ibis.table(\n+\n+\n+@pytest.fixture\n+def nation():\n+ return ibis.table(\n dict(n_nationkey=\"int64\", n_regionkey=\"int64\", n_name=\"string\"),\n name=\"nation\",\n )\n- region = ibis.table(\n+\n+\n+@pytest.fixture\n+def region():\n+ return ibis.table(\n dict(r_regionkey=\"int64\", r_name=\"string\"), name=\"region\"\n )\n \n+\n+@pytest.fixture\n+def tpc_h02(part, supplier, partsupp, nation, region):\n REGION = \"EUROPE\"\n SIZE = 25\n TYPE = \"BRASS\"\n@@ -420,7 +441,7 @@ def test_repr_tpc_h02(benchmark):\n ]\n )\n \n- expr = q.sort_by(\n+ return q.sort_by(\n [\n ibis.desc(q.s_acctbal),\n q.n_name,\n@@ -429,7 +450,9 @@ def test_repr_tpc_h02(benchmark):\n ]\n ).limit(100)\n \n- benchmark(repr, expr)\n+\n+def test_repr_tpc_h02(benchmark, tpc_h02):\n+ benchmark(repr, tpc_h02)\n \n \n def test_repr_huge_union(benchmark):\n@@ -478,3 +501,7 @@ def test_complex_datatype_builtins(benchmark, func):\n )\n )\n benchmark(func, datatype)\n+\n+\n+def test_large_expr_equals(benchmark, tpc_h02):\n+ benchmark(ir.Expr.equals, tpc_h02, copy.deepcopy(tpc_h02))\n"]
| 1 |
["b700285c1f27588922d9c56527cee721bb884682"]
|
["test"]
|
refactor to get rid of cloneDeep
|
["diff --git a/config/webpack.config.prod.js b/config/webpack.config.prod.js\nindex 3d2e5a6..e5219bd 100644\n--- a/config/webpack.config.prod.js\n+++ b/config/webpack.config.prod.js\n@@ -56,7 +56,7 @@ const extractTextPluginOptions = shouldUseRelativeAssetPaths\n const entries = fs.readdirSync(paths.appSrc)\n .filter(name => !name.startsWith('_'))\n .map(name => ({name, dirPath: path.join(paths.appSrc, name)}))\n- .filter(({name, dirPath}) => !/^assets|components|manifest|typings$/.test(name) && fs.lstatSync(dirPath).isDirectory())\n+ .filter(({name, dirPath}) => !/^assets|components|manifest|typings|app-config$/.test(name) && fs.lstatSync(dirPath).isDirectory())\n \n // This is the production configuration.\n // It compiles slowly and is focused on producing a fast and minimal bundle.\ndiff --git a/src/app-config/context-menus.ts b/src/app-config/context-menus.ts\nnew file mode 100644\nindex 0000000..a733b01\n--- /dev/null\n+++ b/src/app-config/context-menus.ts\n@@ -0,0 +1,27 @@\n+export function getAllContextMenus () {\n+ const allContextMenus = {\n+ google_page_translate: 'x',\n+ youdao_page_translate: 'x',\n+ google_search: 'https://www.google.com/#newwindow=1&q=%s',\n+ baidu_search: 'https://www.baidu.com/s?ie=utf-8&wd=%s',\n+ bing_search: 'https://www.bing.com/search?q=%s',\n+ google_translate: 'https://translate.google.cn/#auto/zh-CN/%s',\n+ etymonline: 'http://www.etymonline.com/index.php?search=%s',\n+ merriam_webster: 'http://www.merriam-webster.com/dictionary/%s',\n+ oxford: 'http://www.oxforddictionaries.com/us/definition/english/%s',\n+ cambridge: 'http://dictionary.cambridge.org/spellcheck/english-chinese-simplified/?q=%s',\n+ youdao: 'http://dict.youdao.com/w/%s',\n+ dictcn: 'https://dict.eudic.net/dicts/en/%s',\n+ iciba: 'http://www.iciba.com/%s',\n+ liangan: 'https://www.moedict.tw/~%s',\n+ guoyu: 'https://www.moedict.tw/%s',\n+ longman_business: 'http://www.ldoceonline.com/search/?q=%s',\n+ bing_dict: 'https://cn.bing.com/dict/?q=%s'\n+ }\n+\n+ // Just for type check. Keys in allContextMenus are useful so no actual assertion\n+ // tslint:disable-next-line:no-unused-expression\n+ allContextMenus as { [id: string]: string }\n+\n+ return allContextMenus\n+}\ndiff --git a/src/app-config/dicts.ts b/src/app-config/dicts.ts\nnew file mode 100644\nindex 0000000..905d2de\n--- /dev/null\n+++ b/src/app-config/dicts.ts\n@@ -0,0 +1,398 @@\n+import { DeepReadonly } from '@/typings/helpers'\n+\n+export function getALlDicts () {\n+ const allDicts = {\n+ bing: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word.\n+ */\n+ page: 'https://cn.bing.com/dict/search?q=%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 240,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ },\n+ /** Optional dict custom options. Can only be boolean or number. 
*/\n+ options: {\n+ tense: true,\n+ phsym: true,\n+ cdef: true,\n+ related: true,\n+ sentence: 4\n+ }\n+ },\n+ business: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'http://www.ldoceonline.com/search/?q=%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 265,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ }\n+ },\n+ cobuild: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'https://www.collinsdictionary.com/dictionary/%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 300,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ },\n+ /** Optional dict custom options. Can only be boolean or number. */\n+ options: {\n+ sentence: 4\n+ }\n+ },\n+ dictcn: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'http://dict.cn/%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 300,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ },\n+ /** Optional dict custom options. Can only be boolean or number. 
*/\n+ options: {\n+ chart: true,\n+ etym: true\n+ }\n+ },\n+ etymonline: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'http://www.etymonline.com/search?q=%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 265,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ },\n+ /** Optional dict custom options. Can only be boolean or number. */\n+ options: {\n+ resultnum: 2\n+ }\n+ },\n+ google: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'https://translate.google.com/#auto/zh-CN/%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 110,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ }\n+ },\n+ guoyu: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'https://www.moedict.tw/%z',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 265,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ }\n+ },\n+ liangan: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'https://www.moedict.tw/~%z',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ 
defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 265,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ }\n+ },\n+ macmillan: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'http://www.macmillandictionary.com/dictionary/british/%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 265,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ }\n+ },\n+ urban: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'http://www.urbandictionary.com/define.php?term=%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 180,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ },\n+ /** Optional dict custom options. Can only be boolean or number. 
*/\n+ options: {\n+ resultnum: 4\n+ }\n+ },\n+ vocabulary: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'https://www.vocabulary.com/dictionary/%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 180,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ }\n+ },\n+ zdic: {\n+ /**\n+ * Full content page to jump to when user clicks the title.\n+ * %s will be replaced with the current word.\n+ * %z will be replaced with the traditional Chinese version of the current word\n+ */\n+ page: 'http://www.zdic.net/search/?c=1&q=%s',\n+ /**\n+ * If set to true, the dict start searching automatically.\n+ * Otherwise it'll only start seaching when user clicks the unfold button.\n+ * Default MUST be true and let user decide.\n+ */\n+ defaultUnfold: true,\n+ /**\n+ * This is the default height when the dict first renders the result.\n+ * If the content height is greater than the preferred height,\n+ * the preferred height is used and a mask with a view-more button is shown.\n+ * Otherwise the content height is used.\n+ */\n+ preferredHeight: 400,\n+ /**\n+ * Only start searching if the selection contains the language.\n+ * Better set default to true and let user decide.\n+ */\n+ selectionLang: {\n+ eng: true,\n+ chs: true\n+ }\n+ },\n+ }\n+\n+ // Just for type check. 
Keys in allDicts are useful so no actual assertion\n+ // tslint:disable-next-line:no-unused-expression\n+ allDicts as {\n+ [id: string]: {\n+ page: string\n+ defaultUnfold: boolean\n+ preferredHeight: number\n+ selectionLang: {\n+ eng: boolean\n+ chs: boolean\n+ }\n+ options?: {\n+ [option: string]: number | boolean\n+ }\n+ }\n+ }\n+\n+ return allDicts\n+}\ndiff --git a/src/app-config/index.ts b/src/app-config/index.ts\nindex 350cd8f..879a312 100644\n--- a/src/app-config/index.ts\n+++ b/src/app-config/index.ts\n@@ -1,5 +1,6 @@\n-import cloneDeep from 'lodash/cloneDeep'\n-import { DeepReadonly } from './typings/helpers'\n+import { DeepReadonly } from '@/typings/helpers'\n+import { getALlDicts } from './dicts'\n+import { getAllContextMenus } from './context-menus'\n \n const langUI = (browser.i18n.getUILanguage() || 'en').replace('-', '_')\n const langCode = /^zh_CN|zh_TW|en$/.test(langUI)\n@@ -8,220 +9,11 @@ const langCode = /^zh_CN|zh_TW|en$/.test(langUI)\n : langUI\n : 'en'\n \n-const allDicts = {\n- bing: {\n- page: 'https://cn.bing.com/dict/search?q=%s',\n- defaultUnfold: true,\n- preferredHeight: 240,\n- selectionLang: {\n- eng: true,\n- chs: true\n- },\n- options: {\n- tense: true,\n- phsym: true,\n- cdef: true,\n- related: true,\n- sentence: 4\n- }\n- },\n- business: {\n- page: 'http://www.ldoceonline.com/search/?q=%s',\n- defaultUnfold: true,\n- preferredHeight: 265,\n- selectionLang: {\n- eng: true,\n- chs: true\n- }\n- },\n- cobuild: {\n- page: 'https://www.collinsdictionary.com/dictionary/%s',\n- defaultUnfold: true,\n- preferredHeight: 300,\n- selectionLang: {\n- eng: true,\n- chs: true\n- },\n- options: {\n- sentence: 4\n- }\n- },\n- dictcn: {\n- page: 'http://dict.cn/%s',\n- defaultUnfold: true,\n- preferredHeight: 300,\n- selectionLang: {\n- eng: true,\n- chs: true\n- },\n- options: {\n- chart: true,\n- etym: true\n- }\n- },\n- etymonline: {\n- page: 'http://www.etymonline.com/search?q=%s',\n- defaultUnfold: true,\n- preferredHeight: 265,\n- selectionLang: {\n- eng: true,\n- chs: true\n- },\n- options: {\n- resultnum: 2\n- }\n- },\n- eudic: {\n- page: 'https://dict.eudic.net/dicts/en/%s',\n- defaultUnfold: true,\n- preferredHeight: 265,\n- selectionLang: {\n- eng: true,\n- chs: true\n- }\n- },\n- google: {\n- page: 'https://translate.google.com/#auto/zh-CN/%s',\n- defaultUnfold: true,\n- preferredHeight: 110,\n- selectionLang: {\n- eng: true,\n- chs: true\n- }\n- },\n- guoyu: {\n- page: 'https://www.moedict.tw/%z',\n- defaultUnfold: true,\n- preferredHeight: 265,\n- selectionLang: {\n- eng: true,\n- chs: true\n- }\n- },\n- howjsay: {\n- page: 'http://www.howjsay.com/index.php?word=%s',\n- defaultUnfold: true,\n- preferredHeight: 265,\n- selectionLang: {\n- eng: true,\n- chs: true\n- },\n- options: {\n- related: true\n- }\n- },\n- liangan: {\n- page: 'https://www.moedict.tw/~%z',\n- defaultUnfold: true,\n- preferredHeight: 265,\n- selectionLang: {\n- eng: true,\n- chs: true\n- }\n- },\n- macmillan: {\n- page: 'http://www.macmillandictionary.com/dictionary/british/%s',\n- defaultUnfold: true,\n- preferredHeight: 265,\n- selectionLang: {\n- eng: true,\n- chs: true\n- }\n- },\n- urban: {\n- page: 'http://www.urbandictionary.com/define.php?term=%s',\n- defaultUnfold: true,\n- preferredHeight: 180,\n- selectionLang: {\n- eng: true,\n- chs: true\n- },\n- options: {\n- resultnum: 4\n- }\n- },\n- vocabulary: {\n- page: 'https://www.vocabulary.com/dictionary/%s',\n- defaultUnfold: true,\n- preferredHeight: 180,\n- selectionLang: {\n- eng: true,\n- chs: true\n- }\n- },\n- wordreference: 
{\n- page: 'http://www.wordreference.com/definition/%s',\n- defaultUnfold: true,\n- preferredHeight: 180,\n- selectionLang: {\n- eng: true,\n- chs: true\n- },\n- options: {\n- etym: true,\n- idiom: true\n- }\n- },\n- zdic: {\n- page: 'http://www.zdic.net/search/?c=1&q=%s',\n- defaultUnfold: true,\n- preferredHeight: 400,\n- selectionLang: {\n- eng: true,\n- chs: true\n- }\n- },\n-}\n-\n-// Just for type check. Keys in allDicts are useful so no actual assertion\n-// tslint:disable-next-line:no-unused-expression\n-allDicts as {\n- [id: string]: {\n- /** url for the complete result */\n- page: string\n- /** lazy load */\n- defaultUnfold: boolean\n- /** content below the preferrred height will be hidden by default */\n- preferredHeight: number\n- /** only search when the selection contains the language */\n- selectionLang: {\n- eng: boolean\n- chs: boolean\n- }\n- /** other options */\n- options?: {\n- [option: string]: number | boolean\n- }\n- }\n-}\n-\n-export type DictID = keyof typeof allDicts\n-\n-const allContextMenus = {\n- google_page_translate: 'x',\n- youdao_page_translate: 'x',\n- google_search: 'https://www.google.com/#newwindow=1&q=%s',\n- baidu_search: 'https://www.baidu.com/s?ie=utf-8&wd=%s',\n- bing_search: 'https://www.bing.com/search?q=%s',\n- google_translate: 'https://translate.google.cn/#auto/zh-CN/%s',\n- etymonline: 'http://www.etymonline.com/index.php?search=%s',\n- merriam_webster: 'http://www.merriam-webster.com/dictionary/%s',\n- oxford: 'http://www.oxforddictionaries.com/us/definition/english/%s',\n- cambridge: 'http://dictionary.cambridge.org/spellcheck/english-chinese-simplified/?q=%s',\n- youdao: 'http://dict.youdao.com/w/%s',\n- dictcn: 'https://dict.eudic.net/dicts/en/%s',\n- iciba: 'http://www.iciba.com/%s',\n- liangan: 'https://www.moedict.tw/~%s',\n- guoyu: 'https://www.moedict.tw/%s',\n- longman_business: 'http://www.ldoceonline.com/search/?q=%s',\n- bing_dict: 'https://cn.bing.com/dict/?q=%s'\n-}\n-\n-// Just for type check. 
Keys in allContextMenus are useful so no actual assertion\n-// tslint:disable-next-line:no-unused-expression\n-allContextMenus as { [id: string]: string }\n+export type DictConfigsMutable = ReturnType<typeof getALlDicts>\n+export type DictConfigs = DeepReadonly<DictConfigsMutable>\n+export type DictID = keyof DictConfigsMutable\n \n-export type ContextMenuDictID = keyof typeof allContextMenus\n+export type ContextMenuDictID = keyof ReturnType<typeof getAllContextMenus>\n \n export const enum TCDirection {\n center,\n@@ -238,10 +30,6 @@ export const enum TCDirection {\n /** '' means no preload */\n export type PreloadSource = '' | 'clipboard' | 'selection'\n \n-export type DictConfigs = DeepReadonly<DictConfigsMutable>\n-\n-export type DictConfigsMutable = typeof allDicts\n-\n export type AppConfig = DeepReadonly<AppConfigMutable>\n \n export interface AppConfigMutable {\n@@ -418,7 +206,7 @@ export function appConfigFactory (): AppConfig {\n },\n en: {\n dict: '',\n- list: ['bing', 'dictcn', 'howjsay', 'macmillan', 'eudic', 'urban'],\n+ list: ['bing', 'dictcn', 'macmillan', 'urban'],\n accent: 'uk' as ('us' | 'uk')\n }\n },\n@@ -426,11 +214,11 @@ export function appConfigFactory (): AppConfig {\n dicts: {\n selected: ['bing', 'urban', 'vocabulary', 'dictcn'],\n // settings of each dict will be auto-generated\n- all: cloneDeep(allDicts)\n+ all: getALlDicts()\n },\n contextMenus: {\n selected: ['oxford', 'google_translate', 'merriam_webster', 'cambridge', 'google_search', 'google_page_translate', 'youdao_page_translate'],\n- all: cloneDeep(allContextMenus)\n+ all: getAllContextMenus()\n }\n }\n }\n"]
| 1 |
["d986b530775edd8ef1f4e445a5d4b0016f409722"]
|
["refactor"]
|
add workingDirectory option to shell.openExternal() (#15065)
Allows passing `workingDirectory` to the underlying `ShellExecuteW` API on Windows.

The motivation is that by default `ShellExecute` would use the current working directory, which would get locked on Windows and can prevent autoUpdater from working correctly. We need to be able to specify a different `workingDirectory` to prevent this situation.
|
["diff --git a/atom/browser/atom_browser_client.cc b/atom/browser/atom_browser_client.cc\nindex 97e5f26..df0774b 100644\n--- a/atom/browser/atom_browser_client.cc\n+++ b/atom/browser/atom_browser_client.cc\n@@ -611,7 +611,7 @@ void OnOpenExternal(const GURL& escaped_url, bool allowed) {\n #else\n escaped_url,\n #endif\n- true);\n+ platform_util::OpenExternalOptions());\n }\n \n void HandleExternalProtocolInUI(\ndiff --git a/atom/common/api/atom_api_shell.cc b/atom/common/api/atom_api_shell.cc\nindex 1323cd6..7c67c7a 100644\n--- a/atom/common/api/atom_api_shell.cc\n+++ b/atom/common/api/atom_api_shell.cc\n@@ -60,11 +60,12 @@ bool OpenExternal(\n const GURL& url,\n #endif\n mate::Arguments* args) {\n- bool activate = true;\n+ platform_util::OpenExternalOptions options;\n if (args->Length() >= 2) {\n- mate::Dictionary options;\n- if (args->GetNext(&options)) {\n- options.Get(\"activate\", &activate);\n+ mate::Dictionary obj;\n+ if (args->GetNext(&obj)) {\n+ obj.Get(\"activate\", &options.activate);\n+ obj.Get(\"workingDirectory\", &options.working_dir);\n }\n }\n \n@@ -72,13 +73,13 @@ bool OpenExternal(\n base::Callback<void(v8::Local<v8::Value>)> callback;\n if (args->GetNext(&callback)) {\n platform_util::OpenExternal(\n- url, activate,\n+ url, options,\n base::Bind(&OnOpenExternalFinished, args->isolate(), callback));\n return true;\n }\n }\n \n- return platform_util::OpenExternal(url, activate);\n+ return platform_util::OpenExternal(url, options);\n }\n \n #if defined(OS_WIN)\ndiff --git a/atom/common/platform_util.h b/atom/common/platform_util.h\nindex 6fd8405..6686a4f 100644\n--- a/atom/common/platform_util.h\n+++ b/atom/common/platform_util.h\n@@ -8,6 +8,7 @@\n #include <string>\n \n #include \"base/callback_forward.h\"\n+#include \"base/files/file_path.h\"\n #include \"build/build_config.h\"\n \n #if defined(OS_WIN)\n@@ -16,10 +17,6 @@\n \n class GURL;\n \n-namespace base {\n-class FilePath;\n-}\n-\n namespace platform_util {\n \n typedef base::Callback<void(const std::string&)> OpenExternalCallback;\n@@ -32,6 +29,11 @@ bool ShowItemInFolder(const base::FilePath& full_path);\n // Must be called from the UI thread.\n bool OpenItem(const base::FilePath& full_path);\n \n+struct OpenExternalOptions {\n+ bool activate = true;\n+ base::FilePath working_dir;\n+};\n+\n // Open the given external protocol URL in the desktop's default manner.\n // (For example, mailto: URLs in the default mail user agent.)\n bool OpenExternal(\n@@ -40,7 +42,7 @@ bool OpenExternal(\n #else\n const GURL& url,\n #endif\n- bool activate);\n+ const OpenExternalOptions& options);\n \n // The asynchronous version of OpenExternal.\n void OpenExternal(\n@@ -49,7 +51,7 @@ void OpenExternal(\n #else\n const GURL& url,\n #endif\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback);\n \n // Move a file to trash.\ndiff --git a/atom/common/platform_util_linux.cc b/atom/common/platform_util_linux.cc\nindex 63ee0bd..f17cbda 100644\n--- a/atom/common/platform_util_linux.cc\n+++ b/atom/common/platform_util_linux.cc\n@@ -80,7 +80,7 @@ bool OpenItem(const base::FilePath& full_path) {\n return XDGOpen(full_path.value(), false);\n }\n \n-bool OpenExternal(const GURL& url, bool activate) {\n+bool OpenExternal(const GURL& url, const OpenExternalOptions& options) {\n // Don't wait for exit, since we don't want to wait for the browser/email\n // client window to close before returning\n if (url.SchemeIs(\"mailto\"))\n@@ -90,10 +90,10 @@ bool OpenExternal(const GURL& url, bool activate) {\n }\n 
\n void OpenExternal(const GURL& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n // TODO(gabriel): Implement async open if callback is specified\n- callback.Run(OpenExternal(url, activate) ? \"\" : \"Failed to open\");\n+ callback.Run(OpenExternal(url, options) ? \"\" : \"Failed to open\");\n }\n \n bool MoveItemToTrash(const base::FilePath& full_path) {\ndiff --git a/atom/common/platform_util_mac.mm b/atom/common/platform_util_mac.mm\nindex b83b1e1..4cda8bf 100644\n--- a/atom/common/platform_util_mac.mm\n+++ b/atom/common/platform_util_mac.mm\n@@ -139,16 +139,16 @@ bool OpenItem(const base::FilePath& full_path) {\n launchIdentifiers:NULL];\n }\n \n-bool OpenExternal(const GURL& url, bool activate) {\n+bool OpenExternal(const GURL& url, const OpenExternalOptions& options) {\n DCHECK([NSThread isMainThread]);\n NSURL* ns_url = net::NSURLWithGURL(url);\n if (ns_url)\n- return OpenURL(ns_url, activate).empty();\n+ return OpenURL(ns_url, options.activate).empty();\n return false;\n }\n \n void OpenExternal(const GURL& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n NSURL* ns_url = net::NSURLWithGURL(url);\n if (!ns_url) {\n@@ -157,13 +157,13 @@ void OpenExternal(const GURL& url,\n }\n \n __block OpenExternalCallback c = callback;\n- dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0),\n- ^{\n- __block std::string error = OpenURL(ns_url, activate);\n- dispatch_async(dispatch_get_main_queue(), ^{\n- c.Run(error);\n- });\n- });\n+ dispatch_async(\n+ dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{\n+ __block std::string error = OpenURL(ns_url, options.activate);\n+ dispatch_async(dispatch_get_main_queue(), ^{\n+ c.Run(error);\n+ });\n+ });\n }\n \n bool MoveItemToTrash(const base::FilePath& full_path) {\ndiff --git a/atom/common/platform_util_win.cc b/atom/common/platform_util_win.cc\nindex 34576be..5712200 100644\n--- a/atom/common/platform_util_win.cc\n+++ b/atom/common/platform_util_win.cc\n@@ -294,15 +294,18 @@ bool OpenItem(const base::FilePath& full_path) {\n return ui::win::OpenFileViaShell(full_path);\n }\n \n-bool OpenExternal(const base::string16& url, bool activate) {\n+bool OpenExternal(const base::string16& url,\n+ const OpenExternalOptions& options) {\n // Quote the input scheme to be sure that the command does not have\n // parameters unexpected by the external program. This url should already\n // have been escaped.\n base::string16 escaped_url = L\"\\\"\" + url + L\"\\\"\";\n+ auto working_dir = options.working_dir.value();\n \n- if (reinterpret_cast<ULONG_PTR>(ShellExecuteW(\n- NULL, L\"open\", escaped_url.c_str(), NULL, NULL, SW_SHOWNORMAL)) <=\n- 32) {\n+ if (reinterpret_cast<ULONG_PTR>(\n+ ShellExecuteW(nullptr, L\"open\", escaped_url.c_str(), nullptr,\n+ working_dir.empty() ? nullptr : working_dir.c_str(),\n+ SW_SHOWNORMAL)) <= 32) {\n // We fail to execute the call. We could display a message to the user.\n // TODO(nsylvain): we should also add a dialog to warn on errors. See\n // bug 1136923.\n@@ -312,10 +315,10 @@ bool OpenExternal(const base::string16& url, bool activate) {\n }\n \n void OpenExternal(const base::string16& url,\n- bool activate,\n+ const OpenExternalOptions& options,\n const OpenExternalCallback& callback) {\n // TODO(gabriel): Implement async open if callback is specified\n- callback.Run(OpenExternal(url, activate) ? \"\" : \"Failed to open\");\n+ callback.Run(OpenExternal(url, options) ? 
\"\" : \"Failed to open\");\n }\n \n bool MoveItemToTrash(const base::FilePath& path) {\ndiff --git a/docs/api/shell.md b/docs/api/shell.md\nindex a469f94..b38348a 100644\n--- a/docs/api/shell.md\n+++ b/docs/api/shell.md\n@@ -37,9 +37,10 @@ Open the given file in the desktop's default manner.\n ### `shell.openExternal(url[, options, callback])`\n \n * `url` String - Max 2081 characters on windows, or the function returns false.\n-* `options` Object (optional) _macOS_\n- * `activate` Boolean - `true` to bring the opened application to the\n- foreground. The default is `true`.\n+* `options` Object (optional)\n+ * `activate` Boolean (optional) - `true` to bring the opened application to the\n+ foreground. The default is `true`. _macOS_\n+ * `workingDirectory` String (optional) - The working directory. _Windows_\n * `callback` Function (optional) _macOS_ - If specified will perform the open asynchronously.\n * `error` Error\n \n"]
| 1 |
["a9475f359061fcd6cd53557599fedf0df5e9ee00"]
|
["feat"]
|
stop playing audio on panel close
Closes #824
|
["diff --git a/src/background/audio-manager.ts b/src/background/audio-manager.ts\nindex 84032f1..9e116fc 100644\n--- a/src/background/audio-manager.ts\n+++ b/src/background/audio-manager.ts\n@@ -1,4 +1,4 @@\n-import { timeout } from '@/_helpers/promise-more'\n+import { timer } from '@/_helpers/promise-more'\n \n /**\n * To make sure only one audio plays at a time\n@@ -16,6 +16,8 @@ export class AudioManager {\n \n private audio?: HTMLAudioElement\n \n+ currentSrc?: string\n+\n reset() {\n if (this.audio) {\n this.audio.pause()\n@@ -23,28 +25,33 @@ export class AudioManager {\n this.audio.src = ''\n this.audio.onended = null\n }\n+ this.currentSrc = ''\n }\n \n load(src: string): HTMLAudioElement {\n this.reset()\n+ this.currentSrc = src\n return (this.audio = new Audio(src))\n }\n \n async play(src?: string): Promise<void> {\n- if (!src) {\n+ if (!src || src === this.currentSrc) {\n this.reset()\n return\n }\n \n const audio = this.load(src)\n \n- const onEnd = new Promise(resolve => {\n- audio.onended = resolve\n- })\n+ const onEnd = Promise.race([\n+ new Promise(resolve => {\n+ audio.onended = resolve\n+ }),\n+ timer(20000)\n+ ])\n+\n+ await audio.play()\n+ await onEnd\n \n- await audio\n- .play()\n- .then(() => timeout(onEnd, 4000))\n- .catch(() => {})\n+ this.currentSrc = ''\n }\n }\ndiff --git a/src/background/server.ts b/src/background/server.ts\nindex 65f6f6c..4c70196 100644\n--- a/src/background/server.ts\n+++ b/src/background/server.ts\n@@ -64,6 +64,9 @@ export class BackgroundServer {\n return openURL(msg.payload.url, msg.payload.self)\n case 'PLAY_AUDIO':\n return AudioManager.getInstance().play(msg.payload)\n+ case 'STOP_AUDIO':\n+ AudioManager.getInstance().reset()\n+ return\n case 'FETCH_DICT_RESULT':\n return this.fetchDictResult(msg.payload)\n case 'DICT_ENGINE_METHOD':\n@@ -79,6 +82,7 @@ export class BackgroundServer {\n case 'OPEN_QS_PANEL':\n return this.openQSPanel()\n case 'CLOSE_QS_PANEL':\n+ AudioManager.getInstance().reset()\n return this.qsPanelManager.destroy()\n case 'QS_SWITCH_SIDEBAR':\n return this.qsPanelManager.toggleSidebar(msg.payload)\n@@ -105,6 +109,16 @@ export class BackgroundServer {\n return this.youdaoTranslateAjax(msg.payload)\n }\n })\n+\n+ browser.runtime.onConnect.addListener(port => {\n+ if (port.name === 'popup') {\n+ // This is a workaround for browser action page\n+ // which does not fire beforeunload event\n+ port.onDisconnect.addListener(() => {\n+ AudioManager.getInstance().reset()\n+ })\n+ }\n+ })\n }\n \n async openQSPanel(): Promise<void> {\ndiff --git a/src/content/redux/epics/index.ts b/src/content/redux/epics/index.ts\nindex b941c07..587b54d 100644\n--- a/src/content/redux/epics/index.ts\n+++ b/src/content/redux/epics/index.ts\n@@ -1,6 +1,6 @@\n import { combineEpics } from 'redux-observable'\n import { from, of, EMPTY } from 'rxjs'\n-import { map, mapTo, mergeMap, filter } from 'rxjs/operators'\n+import { map, mapTo, mergeMap, filter, pairwise } from 'rxjs/operators'\n \n import { isPopupPage, isStandalonePage } from '@/_helpers/saladict'\n import { saveWord } from '@/_helpers/record-manager'\n@@ -11,6 +11,7 @@ import { ofType } from './utils'\n import searchStartEpic from './searchStart.epic'\n import newSelectionEpic from './newSelection.epic'\n import { translateCtxs, genCtxText } from '@/_helpers/translateCtx'\n+import { message } from '@/_helpers/browser-api'\n \n export const epics = combineEpics<StoreAction, StoreAction, StoreState>(\n /** Start searching text. This will also send to Redux. 
*/\n@@ -28,6 +29,17 @@ export const epics = combineEpics<StoreAction, StoreAction, StoreState>(\n )\n ),\n (action$, state$) =>\n+ state$.pipe(\n+ map(state => state.isShowDictPanel),\n+ pairwise(),\n+ mergeMap(([oldShow, newShow]) => {\n+ if (oldShow && !newShow) {\n+ message.send({ type: 'STOP_AUDIO' })\n+ }\n+ return EMPTY\n+ })\n+ ),\n+ (action$, state$) =>\n action$.pipe(\n ofType('ADD_TO_NOTEBOOK'),\n mergeMap(() => {\ndiff --git a/src/popup/index.tsx b/src/popup/index.tsx\nindex cbca1c0..a406bfd 100644\n--- a/src/popup/index.tsx\n+++ b/src/popup/index.tsx\n@@ -21,6 +21,10 @@ import Popup from './Popup'\n import Notebook from './Notebook'\n import './_style.scss'\n \n+// This is a workaround for browser action page\n+// which does not fire beforeunload event\n+browser.runtime.connect({ name: 'popup' } as any) // wrong typing\n+\n const Title: FC = () => {\n const { t } = useTranslate('popup')\n return (\ndiff --git a/src/typings/message.ts b/src/typings/message.ts\nindex bdd6fad..63238cb 100644\n--- a/src/typings/message.ts\n+++ b/src/typings/message.ts\n@@ -146,6 +146,8 @@ export type MessageConfig = MessageConfigType<{\n payload: string\n }\n \n+ STOP_AUDIO: {}\n+\n LAST_PLAY_AUDIO: {\n response?: null | { src: string; timestamp: number }\n }\n"]
| 1 |
["97cabf49e7aca7754edde247003fbcb4ea42dd59"]
|
["fix"]
|
add link to roadmap
|
["diff --git a/packages/plugin-core/README.md b/packages/plugin-core/README.md\nindex 3c25c9b..c7506d4 100644\n--- a/packages/plugin-core/README.md\n+++ b/packages/plugin-core/README.md\n@@ -187,6 +187,10 @@ When the workspace opens, it will show dialogue to install the recommended exten\n \n See [[FAQ]] to answers for common questions.\n \n+# Roadmap\n+\n+Check out our [public roadmap](https://github.com/orgs/dendronhq/projects/1) to see the features we're working on and to vote for what you want to see next. \n+\n \n # Contributing\n \n"]
| 1 |
["94202f01e44c58bee4419044f8a18ac5f1a50dff"]
|
["docs"]
|
reuse backup id parameter definition
|
["diff --git a/dist/src/main/resources/api/backup-management-api.yaml b/dist/src/main/resources/api/backup-management-api.yaml\nindex d09fce8..499b0b6 100644\n--- a/dist/src/main/resources/api/backup-management-api.yaml\n+++ b/dist/src/main/resources/api/backup-management-api.yaml\n@@ -65,13 +65,7 @@ paths:\n summary: Get information of a backup\n description: A detailed information of the backup with the give backup id.\n parameters:\n- - name: backupId\n- in : path\n- description: Backup ID\n- required: true\n- schema:\n- type: integer\n- format: int64\n+ - $ref: '#/components/parameters/BackupId'\n responses:\n '200':\n description: OK\n@@ -92,13 +86,7 @@ paths:\n summary: Delete a backup\n description: Delete a backup with the given id\n parameters:\n- - name: backupId\n- in: path\n- description: Backup ID\n- required: true\n- schema:\n- type: integer\n- format: int64\n+ - $ref: '#/components/parameters/BackupId'\n responses:\n '204':\n description: Backup is deleted\n@@ -110,6 +98,15 @@ paths:\n $ref: '#/components/responses/Error'\n \n components:\n+ parameters:\n+ BackupId:\n+ name: backupId\n+ required: true\n+ in: path\n+ description: Id of the backup\n+ schema:\n+ $ref: '#/components/schemas/BackupId'\n+\n responses:\n Error:\n description: Generic error response\n"]
| 1 |
["f7ee0c0983c49a39a6780a751c112904c3b234ca"]
|
["refactor"]
|
Publish crates
|
["diff --git a/CHANGELOG.md b/CHANGELOG.md\nindex 7b98b44..f17ad6f 100644\n--- a/CHANGELOG.md\n+++ b/CHANGELOG.md\n@@ -7,6 +7,9 @@\n \n - **(css/parser)** Fix parsing of at rules (#3328) ([506a310](https://github.com/swc-project/swc/commit/506a31078aaebf50129658f096bbd5929995205f))\n \n+\n+- **(es/compat)** Fix regression of `destructuring` (#3326) ([6d1ad36](https://github.com/swc-project/swc/commit/6d1ad368aca53ee64a63ae565cd015909f2f4458))\n+\n ### Performance\n \n \ndiff --git a/Cargo.lock b/Cargo.lock\nindex 3c6598b..4baa252 100644\n--- a/Cargo.lock\n+++ b/Cargo.lock\n@@ -2652,7 +2652,7 @@ dependencies = [\n \n [[package]]\n name = \"swc\"\n-version = \"0.116.15\"\n+version = \"0.116.16\"\n dependencies = [\n \"ahash\",\n \"anyhow\",\n@@ -3097,7 +3097,7 @@ dependencies = [\n \n [[package]]\n name = \"swc_ecma_transforms\"\n-version = \"0.113.3\"\n+version = \"0.113.4\"\n dependencies = [\n \"pretty_assertions 0.7.2\",\n \"sourcemap\",\n@@ -3157,7 +3157,7 @@ dependencies = [\n \n [[package]]\n name = \"swc_ecma_transforms_compat\"\n-version = \"0.68.2\"\n+version = \"0.68.3\"\n dependencies = [\n \"ahash\",\n \"arrayvec 0.7.2\",\n@@ -3366,7 +3366,7 @@ dependencies = [\n \n [[package]]\n name = \"swc_ecmascript\"\n-version = \"0.110.14\"\n+version = \"0.110.15\"\n dependencies = [\n \"swc_ecma_ast\",\n \"swc_ecma_codegen\",\ndiff --git a/crates/swc/Cargo.toml b/crates/swc/Cargo.toml\nindex 756cfc8..2f02d22 100644\n--- a/crates/swc/Cargo.toml\n+++ b/crates/swc/Cargo.toml\n@@ -9,7 +9,7 @@ include = [\"Cargo.toml\", \"src/**/*.rs\"]\n license = \"Apache-2.0\"\n name = \"swc\"\n repository = \"https://github.com/swc-project/swc.git\"\n-version = \"0.116.15\"\n+version = \"0.116.16\"\n \n [lib]\n name = \"swc\"\n@@ -55,7 +55,7 @@ swc_ecma_loader = {version = \"0.27.0\", path = \"../swc_ecma_loader\", features = [\n swc_ecma_minifier = {version = \"0.70.9\", path = \"../swc_ecma_minifier\"}\n swc_ecma_parser = {version = \"0.87.0\", path = \"../swc_ecma_parser\"}\n swc_ecma_preset_env = {version = \"0.86.1\", path = \"../swc_ecma_preset_env\"}\n-swc_ecma_transforms = {version = \"0.113.3\", path = \"../swc_ecma_transforms\", features = [\n+swc_ecma_transforms = {version = \"0.113.4\", path = \"../swc_ecma_transforms\", features = [\n \"compat\",\n \"module\",\n \"optimization\",\n@@ -64,11 +64,11 @@ swc_ecma_transforms = {version = \"0.113.3\", path = \"../swc_ecma_transforms\", fea\n \"typescript\",\n ]}\n swc_ecma_transforms_base = {version = \"0.57.1\", path = \"../swc_ecma_transforms_base\"}\n-swc_ecma_transforms_compat = {version = \"0.68.2\", path = \"../swc_ecma_transforms_compat\"}\n+swc_ecma_transforms_compat = {version = \"0.68.3\", path = \"../swc_ecma_transforms_compat\"}\n swc_ecma_transforms_optimization = {version = \"0.83.0\", path = \"../swc_ecma_transforms_optimization\"}\n swc_ecma_utils = {version = \"0.64.0\", path = \"../swc_ecma_utils\"}\n swc_ecma_visit = {version = \"0.51.1\", path = \"../swc_ecma_visit\"}\n-swc_ecmascript = {version = \"0.110.14\", path = \"../swc_ecmascript\"}\n+swc_ecmascript = {version = \"0.110.15\", path = \"../swc_ecmascript\"}\n swc_node_comments = {version = \"0.4.0\", path = \"../swc_node_comments\"}\n swc_plugin_runner = {version = \"0.30.0\", path = \"../swc_plugin_runner\", optional = true}\n swc_visit = {version = \"0.3.0\", path = \"../swc_visit\"}\ndiff --git a/crates/swc_ecma_transforms/Cargo.toml b/crates/swc_ecma_transforms/Cargo.toml\nindex 1604f4e..a0aafae 100644\n--- a/crates/swc_ecma_transforms/Cargo.toml\n+++ 
b/crates/swc_ecma_transforms/Cargo.toml\n@@ -6,7 +6,7 @@ edition = \"2021\"\n license = \"Apache-2.0\"\n name = \"swc_ecma_transforms\"\n repository = \"https://github.com/swc-project/swc.git\"\n-version = \"0.113.3\"\n+version = \"0.113.4\"\n \n [package.metadata.docs.rs]\n all-features = true\n@@ -28,7 +28,7 @@ swc_common = {version = \"0.17.0\", path = \"../swc_common\"}\n swc_ecma_ast = {version = \"0.65.0\", path = \"../swc_ecma_ast\"}\n swc_ecma_parser = {version = \"0.87.0\", path = \"../swc_ecma_parser\"}\n swc_ecma_transforms_base = {version = \"0.57.1\", path = \"../swc_ecma_transforms_base\"}\n-swc_ecma_transforms_compat = {version = \"0.68.2\", path = \"../swc_ecma_transforms_compat\", optional = true}\n+swc_ecma_transforms_compat = {version = \"0.68.3\", path = \"../swc_ecma_transforms_compat\", optional = true}\n swc_ecma_transforms_module = {version = \"0.74.0\", path = \"../swc_ecma_transforms_module\", optional = true}\n swc_ecma_transforms_optimization = {version = \"0.83.0\", path = \"../swc_ecma_transforms_optimization\", optional = true}\n swc_ecma_transforms_proposal = {version = \"0.74.0\", path = \"../swc_ecma_transforms_proposal\", optional = true}\ndiff --git a/crates/swc_ecma_transforms_compat/Cargo.toml b/crates/swc_ecma_transforms_compat/Cargo.toml\nindex 0ea6609..58374e3 100644\n--- a/crates/swc_ecma_transforms_compat/Cargo.toml\n+++ b/crates/swc_ecma_transforms_compat/Cargo.toml\n@@ -6,7 +6,7 @@ edition = \"2021\"\n license = \"Apache-2.0\"\n name = \"swc_ecma_transforms_compat\"\n repository = \"https://github.com/swc-project/swc.git\"\n-version = \"0.68.2\"\n+version = \"0.68.3\"\n # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html\n \n [features]\ndiff --git a/crates/swc_ecmascript/Cargo.toml b/crates/swc_ecmascript/Cargo.toml\nindex 63680a0..775208a 100644\n--- a/crates/swc_ecmascript/Cargo.toml\n+++ b/crates/swc_ecmascript/Cargo.toml\n@@ -6,7 +6,7 @@ edition = \"2021\"\n license = \"Apache-2.0\"\n name = \"swc_ecmascript\"\n repository = \"https://github.com/swc-project/swc.git\"\n-version = \"0.110.14\"\n+version = \"0.110.15\"\n \n [package.metadata.docs.rs]\n all-features = true\n@@ -39,7 +39,7 @@ swc_ecma_dep_graph = {version = \"0.58.0\", path = \"../swc_ecma_dep_graph\", option\n swc_ecma_minifier = {version = \"0.70.9\", path = \"../swc_ecma_minifier\", optional = true}\n swc_ecma_parser = {version = \"0.87.0\", path = \"../swc_ecma_parser\", optional = true, default-features = false}\n swc_ecma_preset_env = {version = \"0.86.1\", path = \"../swc_ecma_preset_env\", optional = true}\n-swc_ecma_transforms = {version = \"0.113.3\", path = \"../swc_ecma_transforms\", optional = true}\n+swc_ecma_transforms = {version = \"0.113.4\", path = \"../swc_ecma_transforms\", optional = true}\n swc_ecma_utils = {version = \"0.64.0\", path = \"../swc_ecma_utils\", optional = true}\n swc_ecma_visit = {version = \"0.51.1\", path = \"../swc_ecma_visit\", optional = true}\n \n"]
| 1 |
["af53b9487f74ff28438928903fb1f2db93fe4fa8"]
|
["build"]
|
disable getGPUInfo() tests on Linux (#14875)
|
["diff --git a/spec/api-app-spec.js b/spec/api-app-spec.js\nindex 4ca1fa3..6ab6bd0 100644\n--- a/spec/api-app-spec.js\n+++ b/spec/api-app-spec.js\n@@ -805,6 +805,14 @@ describe('app module', () => {\n })\n \n describe('getGPUInfo() API', () => {\n+ before(function () {\n+ // TODO(alexeykuzmoin): Fails on linux. Enable them back.\n+ // https://github.com/electron/electron/pull/14863\n+ if (process.platform === 'linux') {\n+ this.skip()\n+ }\n+ })\n+\n it('succeeds with basic GPUInfo', (done) => {\n app.getGPUInfo('basic').then((gpuInfo) => {\n // Devices information is always present in the available info\n"]
| 1 |
["60ac03c08f942a8dda49b9f9f7d2ce7a63535414"]
|
["test"]
|
add more tests for Utils.lookupPathFromDecorator
|
["diff --git a/lib/utils/Utils.ts b/lib/utils/Utils.ts\nindex 6de6e05..b03b3e9 100644\n--- a/lib/utils/Utils.ts\n+++ b/lib/utils/Utils.ts\n@@ -338,15 +338,8 @@ export class Utils {\n line++;\n }\n \n- if (stack[line].match(/\\(.+\\)/i)) {\n- meta.path = Utils.normalizePath(\n- stack[line].match(/\\((.*):\\d+:\\d+\\)/)![1],\n- );\n- } else {\n- meta.path = Utils.normalizePath(\n- stack[line].match(/at\\s*(.*):\\d+:\\d+$/)![1],\n- );\n- }\n+ const re = stack[line].match(/\\(.+\\)/i) ? /\\((.*):\\d+:\\d+\\)/ : /at\\s*(.*):\\d+:\\d+$/;\n+ meta.path = Utils.normalizePath(stack[line].match(re)![1]);\n \n return meta.path;\n }\ndiff --git a/tests/Utils.test.ts b/tests/Utils.test.ts\nindex c3e9aa1..4d2a209 100644\n--- a/tests/Utils.test.ts\n+++ b/tests/Utils.test.ts\n@@ -256,7 +256,7 @@ describe('Utils', () => {\n ' at Object.__decorate (/usr/local/var/www/my-project/node_modules/tslib/tslib.js:92:96)',\n ' at Object.<anonymous> (/usr/local/var/www/my-project/dist/entities/Customer.js:20:9)',\n ' at Module._compile (internal/modules/cjs/loader.js:776:30)',\n- ' at Object.Module._extensions..js (internal/modules/cjs/loader.js:787:10)',\n+ ' at Object.Module._extensions.js (internal/modules/cjs/loader.js:787:10)',\n ' at Module.load (internal/modules/cjs/loader.js:643:32)',\n ' at Function.Module._load (internal/modules/cjs/loader.js:556:12)',\n ];\n@@ -272,10 +272,25 @@ describe('Utils', () => {\n ' at Object.<anonymous> (/usr/local/var/www/my-project/src/entities/Customer.ts:9:3)',\n ' at Module._compile (internal/modules/cjs/loader.js:776:30)',\n ' at Module.m._compile (/usr/local/var/www/my-project/node_modules/ts-node/src/index.ts:473:23)',\n- ' at Module._extensions..js (internal/modules/cjs/loader.js:787:10)',\n+ ' at Module._extensions.js (internal/modules/cjs/loader.js:787:10)',\n ' at Object.require.extensions.<computed> [as .ts] (/usr/local/var/www/my-project/node_modules/ts-node/src/index.ts:476:12)',\n ];\n expect(Utils.lookupPathFromDecorator({} as any, stack2)).toBe('/usr/local/var/www/my-project/src/entities/Customer.ts');\n+\n+ // no parens\n+ const stack3 = [\n+ ' at Function.lookupPathFromDecorator (/usr/local/var/www/my-project/node_modules/mikro-orm/dist/utils/Utils.js:170:23)',\n+ ' at /usr/local/var/www/my-project/node_modules/mikro-orm/dist/decorators/PrimaryKey.js:12:23',\n+ ' at DecorateProperty (/usr/local/var/www/my-project/node_modules/reflect-metadata/Reflect.js:553:33)',\n+ ' at Object.decorate (/usr/local/var/www/my-project/node_modules/reflect-metadata/Reflect.js:123:24)',\n+ ' at Object.__decorate (/usr/local/var/www/my-project/node_modules/tslib/tslib.js:92:96)',\n+ ' at /usr/local/var/www/my-project/dist/entities/Customer.js:20:9',\n+ ' at Module._compile (internal/modules/cjs/loader.js:776:30)',\n+ ' at Object.Module._extensions.js (internal/modules/cjs/loader.js:787:10)',\n+ ' at Module.load (internal/modules/cjs/loader.js:643:32)',\n+ ' at Function.Module._load (internal/modules/cjs/loader.js:556:12)',\n+ ];\n+ expect(Utils.lookupPathFromDecorator({} as any, stack3)).toBe('/usr/local/var/www/my-project/dist/entities/Customer.js');\n });\n \n test('lookup path from decorator on windows', () => {\n@@ -287,7 +302,7 @@ describe('Utils', () => {\n ' at Object.<anonymous> (C:\\\\www\\\\my-project\\\\src\\\\entities\\\\Customer.ts:7:5)',\n ' at Module._compile (internal/modules/cjs/loader.js:936:30)',\n ' at Module.m._compile (C:\\\\www\\\\my-project\\\\node_modules\\\\ts-node\\\\src\\\\index.ts:493:23)',\n- ' at Module._extensions..js 
(internal/modules/cjs/loader.js:947:10)',\n+ ' at Module._extensions.js (internal/modules/cjs/loader.js:947:10)',\n ' at Object.require.extensions.<computed> [as .ts] (C:\\\\www\\\\my-project\\\\node_modules\\\\ts-node\\\\src\\\\index.ts:496:12)',\n ' at Module.load (internal/modules/cjs/loader.js:790:32)',\n ' at Function.Module._load (internal/modules/cjs/loader.js:703:12)',\n"]
| 1 |
["c5e86dbc00a13a355bffadeb2db197e2fea5640f"]
|
["test"]
|
fixed start types for size and opacity
|
["diff --git a/core/main/src/Core/Particle.ts b/core/main/src/Core/Particle.ts\nindex 1aa6fba..6ea6ffc 100644\n--- a/core/main/src/Core/Particle.ts\n+++ b/core/main/src/Core/Particle.ts\n@@ -271,7 +271,7 @@ export class Particle implements IParticle {\n }\n }\n \n- const sizeAnimation = this.options.size.animation;\n+ const sizeAnimation = sizeOptions.animation;\n \n if (sizeAnimation.enable) {\n this.size.status = AnimationStatus.increasing;\n@@ -279,7 +279,8 @@ export class Particle implements IParticle {\n if (!randomSize) {\n switch (sizeAnimation.startValue) {\n case StartValueType.min:\n- this.size.value = sizeAnimation.minimumValue * pxRatio;\n+ this.size.value = NumberUtils.getRangeMin(sizeOptions.value) * pxRatio;\n+ this.size.status = AnimationStatus.increasing;\n \n break;\n \n@@ -287,11 +288,14 @@ export class Particle implements IParticle {\n this.size.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(sizeAnimation.minimumValue * pxRatio, this.size.value)\n );\n+ this.size.status =\n+ Math.random() >= 0.5 ? AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.size.value = NumberUtils.getRangeMax(sizeOptions.value) * pxRatio;\n this.size.status = AnimationStatus.decreasing;\n \n break;\n@@ -393,7 +397,8 @@ export class Particle implements IParticle {\n if (!randomOpacity) {\n switch (opacityAnimation.startValue) {\n case StartValueType.min:\n- this.opacity.value = opacityAnimation.minimumValue;\n+ this.opacity.value = NumberUtils.getRangeMin(this.opacity.value);\n+ this.opacity.status = AnimationStatus.increasing;\n \n break;\n \n@@ -401,11 +406,14 @@ export class Particle implements IParticle {\n this.opacity.value = NumberUtils.randomInRange(\n NumberUtils.setRangeValue(opacityAnimation.minimumValue, this.opacity.value)\n );\n+ this.opacity.status =\n+ Math.random() >= 0.5 ? AnimationStatus.increasing : AnimationStatus.decreasing;\n \n break;\n \n case StartValueType.max:\n default:\n+ this.opacity.value = NumberUtils.getRangeMax(this.opacity.value);\n this.opacity.status = AnimationStatus.decreasing;\n \n break;\ndiff --git a/presets/confetti/src/options.ts b/presets/confetti/src/options.ts\nindex 7fc6225..a713425 100644\n--- a/presets/confetti/src/options.ts\n+++ b/presets/confetti/src/options.ts\n@@ -28,7 +28,7 @@ export const loadOptions = (confettiOptions: RecursivePartial<IConfettiOptions>)\n animation: {\n enable: true,\n minimumValue: 0,\n- speed: 2,\n+ speed: 0.5,\n startValue: \"max\",\n destroy: \"min\",\n },\n"]
| 1 |
["06960183db42cba1b1f1a8077660ba8c801c9e18"]
|
["fix"]
|
handle default_branch_monthly_cost having no cost
Closes https://github.com/infracost/infracost-gh-action/issues/17
|
["diff --git a/scripts/ci/diff.sh b/scripts/ci/diff.sh\nindex 7472273..fa48e4b 100755\n--- a/scripts/ci/diff.sh\n+++ b/scripts/ci/diff.sh\n@@ -112,7 +112,12 @@ echo \"$default_branch_output\" > default_branch_infracost.txt\n default_branch_monthly_cost=$(cat default_branch_infracost.txt | awk '/OVERALL TOTAL/ { gsub(\",\",\"\"); printf(\"%.2f\",$NF) }')\n echo \"::set-output name=default_branch_monthly_cost::$default_branch_monthly_cost\"\n \n-percent_diff=$(echo \"scale=4; $current_branch_monthly_cost / $default_branch_monthly_cost * 100 - 100\" | bc)\n+if [ $(echo \"$default_branch_monthly_cost > 0\" | bc -l) = 1 ]; then\n+ percent_diff=$(echo \"scale=4; $current_branch_monthly_cost / $default_branch_monthly_cost * 100 - 100\" | bc)\n+else\n+ echo \"Default branch has no cost, setting percent_diff=100 to force a comment\"\n+ percent_diff=100\n+fi\n absolute_percent_diff=$(echo $percent_diff | tr -d -)\n \n if [ $(echo \"$absolute_percent_diff > $percentage_threshold\" | bc -l) = 1 ]; then\n"]
| 1 |
["9474f58b44a35321e9157ca9890c589a7b3729b2"]
|
["fix"]
|
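The shell fix above guards a `bc` division: when the default branch has no cost, dividing by it would fail, so the script forces a 100% diff to make sure a comment is still posted. The same guard as a small Python sketch:

```python
def percent_diff(current_cost: float, default_cost: float) -> float:
    if default_cost > 0:
        return current_cost / default_cost * 100 - 100
    # no baseline cost: treat everything as a 100% change to force a comment
    return 100.0

assert percent_diff(15.0, 10.0) == 50.0
assert percent_diff(15.0, 0.0) == 100.0
```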
add instruction for finding version
|
["diff --git a/.github/ISSUE_TEMPLATE/_bug_report_chs.md b/.github/ISSUE_TEMPLATE/_bug_report_chs.md\nindex 42a2e0f..44a33db 100644\n--- a/.github/ISSUE_TEMPLATE/_bug_report_chs.md\n+++ b/.github/ISSUE_TEMPLATE/_bug_report_chs.md\n@@ -36,7 +36,7 @@ assignees: ''\n ## \u8bbe\u5907\u4fe1\u606f\n - \u64cd\u4f5c\u7cfb\u7edf: [] <!-- \u5982 [Window10] -->\n - \u6d4f\u89c8\u5668\u7248\u672c: [] <!-- \u5982 [Chrome77] -->\n-- \u6c99\u62c9\u67e5\u8bcd\u7248\u672c: [] <!-- \u5982 [v7.0.0] -->\n+- \u6c99\u62c9\u67e5\u8bcd\u7248\u672c: [] <!-- \u5982 [v7.0.0] \uff08\u5728\u6269\u5c55\u7ba1\u7406\u9875\u9762\u67e5\u770b\uff09 -->\n \n <!-- \u8bf7\u5728\u4e0b\u65b9 ## \u5f00\u5934\u884c\u4e4b\u95f4\u7684\u7a7a\u767d\u5904\u586b\u5199 -->\n \n"]
| 1 |
["af0a5f7ab9d71fe20aa0888f682368f32b26fe18"]
|
["docs"]
|
fix a few issues

|
["diff --git a/README.md b/README.md\nindex d944d22..5099f03 100644\n--- a/README.md\n+++ b/README.md\n@@ -10,9 +10,8 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\n \n <!-- toc -->\n \n-* [Motivation](#motivation)\n+* [Introduction](#introduction)\n * [Installation](#installation)\n- + [Setting up a quick project](#setting-up-a-quick-project)\n * [Usage](#usage)\n + [Creating stores](#creating-stores)\n + [Creating reactive views](#creating-reactive-views)\n@@ -35,12 +34,14 @@ React state management with a minimal API. Made with :heart: and ES6 Proxies.\n Easy State consists of two wrapper functions only. `store` creates state stores and `view` creates reactive components, which re-render whenever state stores are mutated. The rest is just plain JavaScript.\n \n ```js\n-import React, from 'react'\n+import React from 'react'\n import { store, view } from 'react-easy-state'\n \n+// stores are normal objects\n const clock = store({ time: new Date() })\n setInterval(() => clock.time = new Date(), 1000)\n \n+// reactive components re-render on store mutations\n function ClockComp () {\n return <div>{clock.time}</div>\n }\n"]
| 1 |
["b8a664c1b10f4e30a3e221a14211a3cdaf90b7f4"]
|
["docs"]
|
remove duplicated code
|
["diff --git a/packages/core/src/components/action-sheet/action-sheet.tsx b/packages/core/src/components/action-sheet/action-sheet.tsx\nindex 7166508..dad7daf 100644\n--- a/packages/core/src/components/action-sheet/action-sheet.tsx\n+++ b/packages/core/src/components/action-sheet/action-sheet.tsx\n@@ -1,9 +1,9 @@\n import { Component, CssClassMap, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n-import { domControllerAsync, isDef, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -23,15 +23,15 @@ import mdLeaveAnimation from './animations/md.leave';\n })\n export class ActionSheet implements OverlayInterface {\n \n+ private presented = false;\n+\n mode: string;\n color: string;\n-\n- private presented = false;\n- private animation: Animation | null = null;\n+ animation: Animation;\n \n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -178,25 +178,8 @@ export class ActionSheet implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- // Check if prop animate is false or if the config for animate is defined/false\n- if (!this.willAnimate || (isDef(this.config.get('willAnimate')) && this.config.get('willAnimate') === false)) {\n- // if the duration is 0, it won't actually animate I don't think\n- // TODO - validate this\n- this.animation = animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then((animation) => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n protected buttonClick(button: ActionSheetButton) {\ndiff --git a/packages/core/src/components/alert/alert.tsx b/packages/core/src/components/alert/alert.tsx\nindex 800b77b..bdf4fc5 100644\n--- a/packages/core/src/components/alert/alert.tsx\n+++ b/packages/core/src/components/alert/alert.tsx\n@@ -1,8 +1,8 @@\n import { Component, CssClassMap, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n-import { domControllerAsync, playAnimationAsync, autoFocus } from '../../utils/helpers';\n+import { 
Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { domControllerAsync, autoFocus } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -21,18 +21,19 @@ import mdLeaveAnimation from './animations/md.leave';\n }\n })\n export class Alert implements OverlayInterface {\n- mode: string;\n- color: string;\n \n private presented = false;\n- private animation: Animation | null = null;\n private activeId: string;\n private inputType: string | null = null;\n private hdrId: string;\n \n+ animation: Animation;\n+ mode: string;\n+ color: string;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -264,25 +265,10 @@ export class Alert implements OverlayInterface {\n return values;\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n-\n private renderCheckbox(inputs: AlertInput[]) {\n if (inputs.length === 0) return null;\n \ndiff --git a/packages/core/src/components/loading/loading.tsx b/packages/core/src/components/loading/loading.tsx\nindex f45eaf1..cc4f511 100644\n--- a/packages/core/src/components/loading/loading.tsx\n+++ b/packages/core/src/components/loading/loading.tsx\n@@ -1,13 +1,13 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n import mdEnterAnimation from './animations/md.enter';\n import mdLeaveAnimation from './animations/md.leave';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n @Component({\n tag: 'ion-loading',\n@@ -21,16 +21,17 @@ import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n })\n \n export class Loading implements 
OverlayInterface {\n- color: string;\n- mode: string;\n \n private presented = false;\n- private animation: Animation;\n private durationTimeout: any;\n \n+ animation: Animation;\n+ color: string;\n+ mode: string;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -199,24 +200,8 @@ export class Loading implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- // if the duration is 0, it won't actually animate I don't think\n- // TODO - validate this\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n hostData() {\ndiff --git a/packages/core/src/components/modal/modal.tsx b/packages/core/src/components/modal/modal.tsx\nindex af50d63..2b7510c 100644\n--- a/packages/core/src/components/modal/modal.tsx\n+++ b/packages/core/src/components/modal/modal.tsx\n@@ -1,10 +1,10 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n import { DomFrameworkDelegate } from '../../utils/dom-framework-delegate';\n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -25,14 +25,16 @@ import mdLeaveAnimation from './animations/md.leave';\n export class Modal implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation;\n private usersComponentElement: HTMLElement;\n \n+ animation: Animation;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n+\n @Prop() overlayId: number;\n @Prop({ mutable: true }) delegate: FrameworkDelegate;\n \n@@ -208,22 +210,8 @@ export class Modal implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return 
this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then((animation) => {\n- animation.destroy();\n- this.animation = null;\n- });\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n @Method()\ndiff --git a/packages/core/src/components/picker/picker.tsx b/packages/core/src/components/picker/picker.tsx\nindex 13faa3e..d70381e 100644\n--- a/packages/core/src/components/picker/picker.tsx\n+++ b/packages/core/src/components/picker/picker.tsx\n@@ -1,9 +1,9 @@\n import { Component, CssClassMap, Element, Event, EventEmitter, Listen, Method, Prop, State } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { getClassMap } from '../../utils/theme';\n-import { OverlayInterface } from '../../utils/overlays';\n+import { OverlayInterface, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -21,16 +21,17 @@ import iosLeaveAnimation from './animations/ios.leave';\n export class Picker implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation;\n private durationTimeout: any;\n private mode: string;\n \n+ animation: Animation;\n+\n @Element() private el: HTMLElement;\n \n @State() private showSpinner: boolean = null;\n @State() private spinner: string;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -231,22 +232,8 @@ export class Picker implements OverlayInterface {\n return this.columns;\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- })\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, undefined);\n }\n \n private buttonClick(button: PickerButton) {\ndiff --git a/packages/core/src/components/popover/popover.tsx b/packages/core/src/components/popover/popover.tsx\nindex 65031ff..6a47bf6 100644\n--- a/packages/core/src/components/popover/popover.tsx\n+++ b/packages/core/src/components/popover/popover.tsx\n@@ -1,10 +1,10 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, DomController, 
FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, DomController, FrameworkDelegate, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n import { DomFrameworkDelegate } from '../../utils/dom-framework-delegate';\n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses } from '../../utils/theme';\n-import { OverlayInterface, BACKDROP } from '../../utils/overlays';\n+import { OverlayInterface, BACKDROP, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -24,12 +24,13 @@ import mdLeaveAnimation from './animations/md.leave';\n export class Popover implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation;\n private usersComponentElement: HTMLElement;\n \n+ animation: Animation;\n+\n @Element() private el: HTMLElement;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop({ mutable: true }) delegate: FrameworkDelegate;\n@@ -224,22 +225,8 @@ export class Popover implements OverlayInterface {\n });\n }\n \n- private playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el, this.ev).then((animation) => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then(animation => {\n- animation.destroy();\n- this.animation = null;\n- })\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, this.ev);\n }\n \n hostData() {\ndiff --git a/packages/core/src/components/toast/toast.tsx b/packages/core/src/components/toast/toast.tsx\nindex 1afa318..372070a 100644\n--- a/packages/core/src/components/toast/toast.tsx\n+++ b/packages/core/src/components/toast/toast.tsx\n@@ -1,9 +1,9 @@\n import { Component, Element, Event, EventEmitter, Listen, Method, Prop } from '@stencil/core';\n-import { Animation, AnimationBuilder, AnimationController, Config, CssClassMap, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n+import { Animation, AnimationBuilder, Config, CssClassMap, DomController, OverlayDismissEvent, OverlayDismissEventDetail } from '../../index';\n \n-import { domControllerAsync, playAnimationAsync } from '../../utils/helpers';\n+import { domControllerAsync } from '../../utils/helpers';\n import { createThemedClasses, getClassMap } from '../../utils/theme';\n-import { OverlayInterface } from '../../utils/overlays';\n+import { OverlayInterface, overlayAnimation } from '../../utils/overlays';\n \n import iosEnterAnimation from './animations/ios.enter';\n import iosLeaveAnimation from './animations/ios.leave';\n@@ -24,14 +24,14 @@ import mdLeaveAnimation from './animations/md.leave';\n export class Toast implements OverlayInterface {\n \n private presented = false;\n- private animation: Animation | null;\n \n @Element() private el: HTMLElement;\n \n 
mode: string;\n color: string;\n+ animation: Animation | null;\n \n- @Prop({ connect: 'ion-animation-controller' }) animationCtrl: AnimationController;\n+ @Prop({ connect: 'ion-animation-controller' }) animationCtrl: HTMLIonAnimationControllerElement;\n @Prop({ context: 'config' }) config: Config;\n @Prop({ context: 'dom' }) dom: DomController;\n @Prop() overlayId: number;\n@@ -123,6 +123,22 @@ export class Toast implements OverlayInterface {\n */\n @Event() ionToastDidUnload: EventEmitter<ToastEventDetail>;\n \n+ componentDidLoad() {\n+ this.ionToastDidLoad.emit();\n+ }\n+\n+ componentDidUnload() {\n+ this.ionToastDidUnload.emit();\n+ }\n+\n+ @Listen('ionDismiss')\n+ protected onDismiss(ev: UIEvent) {\n+ ev.stopPropagation();\n+ ev.preventDefault();\n+\n+ this.dismiss();\n+ }\n+\n /**\n * Present the toast overlay after it has been created.\n */\n@@ -169,38 +185,8 @@ export class Toast implements OverlayInterface {\n });\n }\n \n- playAnimation(animationBuilder: AnimationBuilder) {\n- if (this.animation) {\n- this.animation.destroy();\n- this.animation = null;\n- }\n-\n- return this.animationCtrl.create(animationBuilder, this.el, this.position).then(animation => {\n- this.animation = animation;\n- if (!this.willAnimate) {\n- animation.duration(0);\n- }\n- return playAnimationAsync(animation);\n- }).then((animation) => {\n- animation.destroy();\n- this.animation = null;\n- });\n- }\n-\n- componentDidLoad() {\n- this.ionToastDidLoad.emit();\n- }\n-\n- componentDidUnload() {\n- this.ionToastDidUnload.emit();\n- }\n-\n- @Listen('ionDismiss')\n- protected onDismiss(ev: UIEvent) {\n- ev.stopPropagation();\n- ev.preventDefault();\n-\n- this.dismiss();\n+ private playAnimation(animationBuilder: AnimationBuilder): Promise<void> {\n+ return overlayAnimation(this, animationBuilder, this.willAnimate, this.el, this.position);\n }\n \n private wrapperClass(): CssClassMap {\ndiff --git a/packages/core/src/utils/overlays.ts b/packages/core/src/utils/overlays.ts\nindex 8926544..634df43 100644\n--- a/packages/core/src/utils/overlays.ts\n+++ b/packages/core/src/utils/overlays.ts\n@@ -1,3 +1,5 @@\n+import { AnimationBuilder, Animation } from \"..\";\n+import { playAnimationAsync } from \"./helpers\";\n \n let lastId = 1;\n \n@@ -56,8 +58,33 @@ export function removeLastOverlay(overlays: OverlayMap) {\n return toRemove ? toRemove.dismiss() : Promise.resolve();\n }\n \n+export function overlayAnimation(\n+ overlay: OverlayInterface,\n+ animationBuilder: AnimationBuilder,\n+ animate: boolean,\n+ baseEl: HTMLElement,\n+ opts: any\n+): Promise<void> {\n+ if (overlay.animation) {\n+ overlay.animation.destroy();\n+ overlay.animation = null;\n+ }\n+ return overlay.animationCtrl.create(animationBuilder, baseEl, opts).then(animation => {\n+ overlay.animation = animation;\n+ if (!animate) {\n+ animation.duration(0);\n+ }\n+ return playAnimationAsync(animation);\n+ }).then((animation) => {\n+ animation.destroy();\n+ overlay.animation = null;\n+ });\n+}\n+\n export interface OverlayInterface {\n overlayId: number;\n+ animation: Animation;\n+ animationCtrl: HTMLIonAnimationControllerElement;\n \n present(): Promise<void>;\n dismiss(data?: any, role?: string): Promise<void>;\n"]
| 1 |
["9e3f295bbfd4098ffda1ae6656699f60b86c1f92"]
|
["refactor"]
|
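The refactor above collapses six near-identical `playAnimation` methods into one `overlayAnimation` helper that receives the overlay and manages its `animation` field. A sketch of the same extraction, transposed to Python with illustrative stand-ins for the Stencil animation API:

```python
class FakeAnimation:
    def duration(self, ms): self.ms = ms
    def play(self): pass
    def destroy(self): pass

def overlay_animation(overlay, create_animation, animate, base_el, opts=None):
    if overlay.animation is not None:        # tear down any in-flight animation
        overlay.animation.destroy()
        overlay.animation = None
    animation = create_animation(base_el, opts)
    overlay.animation = animation
    if not animate:
        animation.duration(0)                # a 0ms duration skips the effect
    animation.play()
    animation.destroy()
    overlay.animation = None                 # always cleaned up afterwards

class Toast:
    animation = None

toast = Toast()
overlay_animation(toast, lambda el, opts: FakeAnimation(), animate=False, base_el="ion-toast")
assert toast.animation is None
```

Each component then reduces to a one-line delegation, which is exactly what the diff does per overlay class.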
fix sonar integration
|
["diff --git a/.ci/scripts/distribution/analyse-java.sh b/.ci/scripts/distribution/analyse-java.sh\nindex a0122f7..0e965df 100755\n--- a/.ci/scripts/distribution/analyse-java.sh\n+++ b/.ci/scripts/distribution/analyse-java.sh\n@@ -23,12 +23,12 @@ else\n fi\n \n if [ \"${GIT_BRANCH}\" == \"master\" ] || [ \"${GIT_BRANCH}\" == \"develop\" ]; then\n- TARGET_BRANCH=\"master\"\n+ TARGET_BRANCH=\"${GIT_BRANCH}\"\n else\n TARGET_BRANCH=\"develop\"\n+ PROPERTIES+=(\"-Dsonar.branch.target=${TARGET_BRANCH}\")\n fi\n \n- PROPERTIES+=(\"-Dsonar.branch.target=${TARGET_BRANCH}\")\n git fetch --no-tags \"${GIT_URL}\" \"+refs/heads/${TARGET_BRANCH}:refs/remotes/origin/${TARGET_BRANCH}\"\n fi\n \ndiff --git a/parent/pom.xml b/parent/pom.xml\nindex f4c3160..d34b41f 100644\n--- a/parent/pom.xml\n+++ b/parent/pom.xml\n@@ -1570,7 +1570,7 @@\n <!-- sonarscanner integration -->\n <!-- sonar.login token must be passed at runtime to avoid sharing token -->\n <sonar.host.url>https://sonarcloud.io</sonar.host.url>\n- <sonar.organization>zeebe-io</sonar.organization>\n+ <sonar.organization>camunda-cloud</sonar.organization>\n <sonar.login>${env.SONARCLOUD_TOKEN}</sonar.login>\n <sonar.links.issue>${project.scm.url}/issues</sonar.links.issue>\n <sonar.cpd.exclusions>\n"]
| 1 |
["6cbbd98dfe6c768dbe49f8d6d2448856a9a86089"]
|
["build"]
|
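The script change above does two things: long-lived branches (`master`, `develop`) now target themselves, and `sonar.branch.target` is only passed for short-lived branches. A Python rendering of that decision:

```python
def sonar_properties(git_branch: str) -> tuple[str, list[str]]:
    props: list[str] = []
    if git_branch in ("master", "develop"):
        target = git_branch                       # analyse against itself
    else:
        target = "develop"
        props.append(f"-Dsonar.branch.target={target}")
    return target, props

assert sonar_properties("develop") == ("develop", [])
assert sonar_properties("feature/x") == ("develop", ["-Dsonar.branch.target=develop"])
```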
filters for Rating
Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com>
|
["diff --git a/tests/playwright/pages/Dashboard/common/Toolbar/Filter.ts b/tests/playwright/pages/Dashboard/common/Toolbar/Filter.ts\nindex 1a626fa..b82e7f6 100644\n--- a/tests/playwright/pages/Dashboard/common/Toolbar/Filter.ts\n+++ b/tests/playwright/pages/Dashboard/common/Toolbar/Filter.ts\n@@ -1,6 +1,7 @@\n import { expect } from '@playwright/test';\n import BasePage from '../../../Base';\n import { ToolbarPage } from './index';\n+import { UITypes } from 'nocodb-sdk';\n \n export class ToolbarFilterPage extends BasePage {\n readonly toolbar: ToolbarPage;\n@@ -33,11 +34,13 @@ export class ToolbarFilterPage extends BasePage {\n opType,\n value,\n isLocallySaved,\n+ dataType,\n }: {\n columnTitle: string;\n opType: string;\n value?: string;\n isLocallySaved: boolean;\n+ dataType?: string;\n }) {\n await this.get().locator(`button:has-text(\"Add Filter\")`).first().click();\n \n@@ -86,14 +89,25 @@ export class ToolbarFilterPage extends BasePage {\n \n // if value field was provided, fill it\n if (value) {\n- const fillFilter = this.rootPage.locator('.nc-filter-value-select > input').last().fill(value);\n- await this.waitForResponse({\n- uiAction: fillFilter,\n- httpMethodsToMatch: ['GET'],\n- requestUrlPathToMatch: isLocallySaved ? `/api/v1/db/public/` : `/api/v1/db/data/noco/`,\n- });\n- await this.toolbar.parent.dashboard.waitForLoaderToDisappear();\n- await this.toolbar.parent.waitLoading();\n+ let fillFilter: any = null;\n+ switch (dataType) {\n+ case UITypes.Rating:\n+ await this.get('.nc-filter-value-select')\n+ .locator('.ant-rate-star > div')\n+ .nth(parseInt(value) - 1)\n+ .click();\n+ break;\n+ default:\n+ fillFilter = this.rootPage.locator('.nc-filter-value-select > input').last().fill(value);\n+ await this.waitForResponse({\n+ uiAction: fillFilter,\n+ httpMethodsToMatch: ['GET'],\n+ requestUrlPathToMatch: isLocallySaved ? 
`/api/v1/db/public/` : `/api/v1/db/data/noco/`,\n+ });\n+ await this.toolbar.parent.dashboard.waitForLoaderToDisappear();\n+ await this.toolbar.parent.waitLoading();\n+ break;\n+ }\n }\n }\n \ndiff --git a/tests/playwright/tests/filters.spec.ts b/tests/playwright/tests/filters.spec.ts\nindex 774a70a..48d949a 100644\n--- a/tests/playwright/tests/filters.spec.ts\n+++ b/tests/playwright/tests/filters.spec.ts\n@@ -36,7 +36,13 @@ async function validateRowArray(param) {\n // }\n }\n \n-async function verifyFilter(param: { column: string; opType: string; value?: string; result: { rowCount: number } }) {\n+async function verifyFilter(param: {\n+ column: string;\n+ opType: string;\n+ value?: string;\n+ result: { rowCount: number };\n+ dataType?: string;\n+}) {\n // if opType was included in skip list, skip it\n if (skipList[param.column]?.includes(param.opType)) {\n return;\n@@ -48,6 +54,7 @@ async function verifyFilter(param: { column: string; opType: string; value?: str\n opType: param.opType,\n value: param.value,\n isLocallySaved: false,\n+ dataType: param?.dataType,\n });\n await toolbar.clickFilter();\n \n@@ -414,4 +421,74 @@ test.describe('Filter Tests: Numerical', () => {\n });\n }\n });\n+\n+ test('Filter: Rating', async () => {\n+ // close 'Team & Auth' tab\n+ await dashboard.closeTab({ title: 'Team & Auth' });\n+ await dashboard.treeView.openTable({ title: 'numberBased' });\n+ const dataType = 'Rating';\n+\n+ const filterList = [\n+ {\n+ op: '=',\n+ value: '3',\n+ rowCount: records.list.filter(r => r[dataType] === 3).length,\n+ },\n+ {\n+ op: '!=',\n+ value: '3',\n+ rowCount: records.list.filter(r => r[dataType] !== 3).length,\n+ },\n+ {\n+ op: 'is null',\n+ value: '',\n+ rowCount: records.list.filter(r => r[dataType] === null).length,\n+ },\n+ {\n+ op: 'is not null',\n+ value: '',\n+ rowCount: records.list.filter(r => r[dataType] !== null).length,\n+ },\n+ {\n+ op: 'is blank',\n+ value: '',\n+ rowCount: records.list.filter(r => r[dataType] === null).length,\n+ },\n+ {\n+ op: 'is not blank',\n+ value: '',\n+ rowCount: records.list.filter(r => r[dataType] !== null).length,\n+ },\n+ {\n+ op: '>',\n+ value: '2',\n+ rowCount: records.list.filter(r => r[dataType] > 2 && r[dataType] != null).length,\n+ },\n+ {\n+ op: '>=',\n+ value: '2',\n+ rowCount: records.list.filter(r => r[dataType] >= 2 && r[dataType] != null).length,\n+ },\n+ {\n+ op: '<',\n+ value: '2',\n+ rowCount: records.list.filter(r => r[dataType] < 2 && r[dataType] != null).length,\n+ },\n+ {\n+ op: '<=',\n+ value: '2',\n+ rowCount: records.list.filter(r => r[dataType] <= 2 && r[dataType] != null).length,\n+ },\n+ ];\n+\n+ for (let i = 0; i < filterList.length; i++) {\n+ await verifyFilter({\n+ column: dataType,\n+ opType: filterList[i].op,\n+ value: filterList[i].value,\n+ result: { rowCount: filterList[i].rowCount },\n+ dataType: dataType,\n+ });\n+ }\n+ });\n });\n"]
| 1 |
["de88de81551d3e2619444a25a68170c9ed35a9b5"]
|
["test"]
|
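The page object above starts dispatching on the column's UI type: Rating filters are set by clicking the n-th star (a 0-based locator, hence the `- 1`), everything else by typing into the value input. The same dispatch, condensed into plain Python for illustration:

```python
def fill_filter_value(data_type: str, value: str) -> str:
    if data_type == "Rating":
        nth = int(value) - 1        # star locators are 0-based
        return f"click .ant-rate-star[{nth}]"
    return f"fill '.nc-filter-value-select > input' with '{value}'"

assert fill_filter_value("Rating", "3") == "click .ant-rate-star[2]"
assert fill_filter_value("Number", "3") == "fill '.nc-filter-value-select > input' with '3'"
```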
correct code comment
|
["diff --git a/server/src/db.rs b/server/src/db.rs\nindex bfc5e17..0fb4d55 100644\n--- a/server/src/db.rs\n+++ b/server/src/db.rs\n@@ -389,7 +389,7 @@ impl Db {\n let partition = LockableCatalogPartition::new(Arc::clone(&self), partition);\n \n // Do lock dance to get a write lock on the partition as well\n- // as on all of the chunks\n+ // as on the to-be-dropped chunk.\n let partition = partition.read();\n \n let chunk = self.lockable_chunk(table_name, partition_key, chunk_id)?;\n"]
| 1 |
["cccdd8a43fea7614f78b6f1dcf1765100928a3db"]
|
["docs"]
|
add !important to override paragraphs in items
|
["diff --git a/packages/core/src/components/text/text.ios.scss b/packages/core/src/components/text/text.ios.scss\nindex a3c58e2..2a020ab 100644\n--- a/packages/core/src/components/text/text.ios.scss\n+++ b/packages/core/src/components/text/text.ios.scss\n@@ -9,8 +9,9 @@\n @each $color-name, $color-base, $color-contrast in get-colors($colors-ios) {\n \n .text-ios-#{$color-name},\n- .text-ios-#{$color-name} a {\n- color: $color-base;\n+ .text-ios-#{$color-name} a,\n+ .text-ios-#{$color-name} p {\n+ color: $color-base !important\n }\n \n }\ndiff --git a/packages/core/src/components/text/text.md.scss b/packages/core/src/components/text/text.md.scss\nindex b397acb..050af1a 100644\n--- a/packages/core/src/components/text/text.md.scss\n+++ b/packages/core/src/components/text/text.md.scss\n@@ -9,8 +9,9 @@\n @each $color-name, $color-base, $color-contrast in get-colors($colors-md) {\n \n .text-md-#{$color-name},\n- .text-md-#{$color-name} a {\n- color: $color-base;\n+ .text-md-#{$color-name} a,\n+ .text-md-#{$color-name} p {\n+ color: $color-base !important;\n }\n \n }\n"]
| 1 |
["7ab363f7ba2807b3eb9895e47f4fcd058f43ae5e"]
|
["test"]
|
use a closure
|
["diff --git a/ibis/expr/analysis.py b/ibis/expr/analysis.py\nindex bb17a7a..975c658 100644\n--- a/ibis/expr/analysis.py\n+++ b/ibis/expr/analysis.py\n@@ -39,7 +39,9 @@ def sub_for(expr, substitutions):\n An Ibis expression\n \"\"\"\n \n- def fn(node, mapping={k.op(): v for k, v in substitutions}):\n+ mapping = {k.op(): v for k, v in substitutions}\n+\n+ def fn(node):\n try:\n return mapping[node]\n except KeyError:\n"]
| 1 |
["ad52e1d67fd77f0b6a73fbf989b33f9abf395ecc"]
|
["refactor"]
|
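Why the one-line move above is worth making: the default-argument idiom builds the dict when `fn` is defined (here, once per `sub_for` call, same cost as the closure), but it also exposes `mapping` as an overridable parameter of `fn`. Closing over a local keeps the mapping out of the callback's signature. A simplified, self-contained rendering:

```python
def sub_for(expr, substitutions):
    mapping = {k: v for k, v in substitutions}  # built once per call

    def fn(node):
        try:
            return mapping[node]                # closure over `mapping`
        except KeyError:
            return node

    return fn(expr)

assert sub_for("a", [("a", "b")]) == "b"
assert sub_for("x", [("a", "b")]) == "x"
```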
verify property exists in row object
Signed-off-by: Pranav C <pranavxc@gmail.com>
|
["diff --git a/packages/nc-gui/components/smartsheet/Grid.vue b/packages/nc-gui/components/smartsheet/Grid.vue\nindex 8ff5b1d..e83e2ab 100644\n--- a/packages/nc-gui/components/smartsheet/Grid.vue\n+++ b/packages/nc-gui/components/smartsheet/Grid.vue\n@@ -534,7 +534,7 @@ const saveOrUpdateRecords = async (args: { metaValue?: TableType; viewMetaValue?\n currentRow.rowMeta.changed = false\n for (const field of (args.metaValue || meta.value)?.columns ?? []) {\n if (isVirtualCol(field)) continue\n- if (currentRow.row[field.title!] !== currentRow.oldRow[field.title!]) {\n+ if (field.title! in currentRow.row && currentRow.row[field.title!] !== currentRow.oldRow[field.title!]) {\n await updateOrSaveRow(currentRow, field.title!, {}, args)\n }\n }\n"]
| 1 |
["c6403b62f8dc0e5bfe25a1c6306fb7040ca447ae"]
|
["fix"]
|
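The fix above guards against columns that are absent from the row object: without the `in` check, a missing key compares as `undefined` against the old value and can trigger a spurious save. A Python analogue of the guard, using dicts in place of the Vue row objects:

```python
def changed_fields(row: dict, old_row: dict, columns: list[str]) -> list[str]:
    return [
        col for col in columns
        if col in row and row.get(col) != old_row.get(col)
    ]

assert changed_fields({"a": 1}, {"a": 2, "b": 3}, ["a", "b"]) == ["a"]
# "b" is skipped: it never existed on the new row, so it is not a change
```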
Template using kube api version
Signed-off-by: rjshrjndrn <rjshrjndrn@gmail.com>
|
["diff --git a/.github/workflows/api-ee.yaml b/.github/workflows/api-ee.yaml\nindex c014f34..2a12e0d 100644\n--- a/.github/workflows/api-ee.yaml\n+++ b/.github/workflows/api-ee.yaml\n@@ -8,7 +8,7 @@ on:\n default: 'false'\n push:\n branches:\n- - dev\n+ - test_ci\n paths:\n - ee/api/**\n - api/**\n@@ -112,7 +112,8 @@ jobs:\n # Deploy command\n kubectl config set-context --namespace=app --current\n kubectl config get-contexts\n- helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks | kubectl apply -f -\n+ k_version=$(kubectl version --short 2>/dev/null | awk '/Server/{print $NF}')\n+ helm template openreplay -n app openreplay -f vars.yaml -f /tmp/image_override.yaml --set ingress-nginx.enabled=false --set skipMigration=true --no-hooks --kube-version=$k_version | kubectl apply -f -\n env:\n DOCKER_REPO: ${{ secrets.EE_REGISTRY_URL }}\n # We're not passing -ee flag, because helm will add that.\n"]
| 1 |
["c3531347fe5a4cc82d426db195026a5bdad15e7a"]
|
["cicd"]
|
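The CI step above reads the cluster's server version and forwards it to `helm template --kube-version` so charts that branch on Kubernetes capabilities render correctly. A hedged Python sketch of the version lookup (note that `kubectl version --short` is deprecated in recent kubectl releases, so the parsing may need adjusting):

```python
import re
import subprocess

def server_kube_version() -> str:
    out = subprocess.run(
        ["kubectl", "version", "--short"],
        capture_output=True, text=True, check=True,
    ).stdout
    match = re.search(r"Server Version:\s*(\S+)", out)
    if match is None:
        raise RuntimeError("could not determine server version")
    return match.group(1)

# The CI step then forwards it, as in the diff:
# helm template openreplay -n app openreplay ... --kube-version=$k_version
```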
apply element migrated events
This is a very straightforward event applier. All it needs to do is
update the persisted data for the element instance using the data in the
event.
|
["diff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\nindex da05e13..9231df3 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/EventAppliers.java\n@@ -154,6 +154,9 @@ public final class EventAppliers implements EventApplier {\n register(\n ProcessInstanceIntent.SEQUENCE_FLOW_TAKEN,\n new ProcessInstanceSequenceFlowTakenApplier(elementInstanceState, processState));\n+ register(\n+ ProcessInstanceIntent.ELEMENT_MIGRATED,\n+ new ProcessInstanceElementMigratedApplier(elementInstanceState));\n }\n \n private void registerProcessInstanceCreationAppliers(final MutableProcessingState state) {\ndiff --git a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\nindex e5a0f3a..d38358f 100644\n--- a/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\n+++ b/engine/src/main/java/io/camunda/zeebe/engine/state/appliers/ProcessInstanceElementMigratedApplier.java\n@@ -24,5 +24,16 @@ final class ProcessInstanceElementMigratedApplier\n }\n \n @Override\n- public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {}\n+ public void applyState(final long elementInstanceKey, final ProcessInstanceRecord value) {\n+ elementInstanceState.updateInstance(\n+ elementInstanceKey,\n+ elementInstance ->\n+ elementInstance\n+ .getValue()\n+ .setProcessDefinitionKey(value.getProcessDefinitionKey())\n+ .setBpmnProcessId(value.getBpmnProcessId())\n+ .setVersion(value.getVersion())\n+ .setElementId(value.getElementId())\n+ .setFlowScopeKey(value.getFlowScopeKey()));\n+ }\n }\n"]
| 1 |
["39d5d1cfe8d2210305df2c8fab4a4ae430732cf7"]
|
["feat"]
|
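A Python sketch of the pattern the commit extends: appliers are registered by intent, and the migrated applier simply copies the event's identity fields onto the persisted element instance. Field names follow the Java record; the dict-based state is a stand-in for the RocksDB-backed instance state.

```python
APPLIERS = {}

def register(intent, applier):
    APPLIERS[intent] = applier

def element_migrated_applier(state, element_instance_key, value):
    instance = state[element_instance_key]
    for field in ("process_definition_key", "bpmn_process_id",
                  "version", "element_id", "flow_scope_key"):
        instance[field] = value[field]           # overwrite persisted identity

register("ELEMENT_MIGRATED", element_migrated_applier)

state = {7: {"element_id": "taskA", "version": 1}}
APPLIERS["ELEMENT_MIGRATED"](state, 7, {
    "process_definition_key": 2, "bpmn_process_id": "proc",
    "version": 2, "element_id": "taskB", "flow_scope_key": 5,
})
assert state[7]["element_id"] == "taskB" and state[7]["version"] == 2
```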
lint source on ci
|
["diff --git a/.travis.yml b/.travis.yml\nindex d56185e..96510cb 100644\n--- a/.travis.yml\n+++ b/.travis.yml\n@@ -2,5 +2,6 @@ language: node_js\n node_js:\n - 'stable'\n script:\n+ - yarn lint\n - yarn build\n - yarn test\n"]
| 1 |
["2ac99c0a66a1adc18ee4ef660608f814823dd198"]
|
["cicd"]
|
skip ruff format in pre-commit ci runner
|
["diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml\nindex 6193d96..4ba39d6 100644\n--- a/.pre-commit-config.yaml\n+++ b/.pre-commit-config.yaml\n@@ -9,6 +9,7 @@ ci:\n - nixpkgs-fmt\n - prettier\n - ruff\n+ - ruff-format\n - shellcheck\n - shfmt\n - statix\n"]
| 1 |
["9117fdedb9b5ce0345c31b3e1fa22ae8554944d4"]
|
["cicd"]
|
add test for spurious cross join
|
["diff --git a/ibis/tests/sql/test_sqlalchemy.py b/ibis/tests/sql/test_sqlalchemy.py\nindex 4ad32a6..b2e5d72 100644\n--- a/ibis/tests/sql/test_sqlalchemy.py\n+++ b/ibis/tests/sql/test_sqlalchemy.py\n@@ -841,3 +841,63 @@ def test_filter_group_by_agg_with_same_name():\n )\n ex = sa.select([t0]).where(t0.c.bigint_col == 60)\n _check(expr, ex)\n+\n+\n+@pytest.fixture\n+def person():\n+ return ibis.table(\n+ dict(id=\"string\", personal=\"string\", family=\"string\"),\n+ name=\"person\",\n+ )\n+\n+\n+@pytest.fixture\n+def visited():\n+ return ibis.table(\n+ dict(id=\"int32\", site=\"string\", dated=\"string\"),\n+ name=\"visited\",\n+ )\n+\n+\n+@pytest.fixture\n+def survey():\n+ return ibis.table(\n+ dict(\n+ taken=\"int32\",\n+ person=\"string\",\n+ quant=\"string\",\n+ reading=\"float32\",\n+ ),\n+ name=\"survey\",\n+ )\n+\n+\n+def test_no_cross_join(person, visited, survey):\n+ expr = person.join(survey, person.id == survey.person).join(\n+ visited,\n+ visited.id == survey.taken,\n+ )\n+\n+ context = AlchemyContext(compiler=AlchemyCompiler)\n+ _ = AlchemyCompiler.to_sql(expr, context)\n+\n+ t0 = context.get_ref(person)\n+ t1 = context.get_ref(survey)\n+ t2 = context.get_ref(visited)\n+\n+ from_ = t0.join(t1, t0.c.id == t1.c.person).join(t2, t2.c.id == t1.c.taken)\n+ ex = sa.select(\n+ [\n+ t0.c.id.label(\"id_x\"),\n+ t0.c.personal,\n+ t0.c.family,\n+ t1.c.taken,\n+ t1.c.person,\n+ t1.c.quant,\n+ t1.c.reading,\n+ t2.c.id.label(\"id_y\"),\n+ t2.c.site,\n+ t2.c.dated,\n+ ]\n+ ).select_from(from_)\n+ _check(expr, ex)\n"]
| 1 |
["8dac3fe5a7a56356ca95547fcf7925bec8d9c1dd"]
|
["test"]
|
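The new test pins a regression where chaining two equality joins compiled to a cross join. A lighter-weight way to assert the same property — a sketch, assuming an ibis version where `expr.compile()` returns SQL text for a configured default SQL backend (the in-repo test instead compares against a hand-built SQLAlchemy select):

```python
import ibis

person = ibis.table(dict(id="string", personal="string", family="string"), name="person")
survey = ibis.table(
    dict(taken="int32", person="string", quant="string", reading="float32"),
    name="survey",
)
visited = ibis.table(dict(id="int32", site="string", dated="string"), name="visited")

expr = person.join(survey, person.id == survey.person).join(
    visited, visited.id == survey.taken
)

sql = str(expr.compile())               # assumes a configured SQL backend
assert "CROSS JOIN" not in sql.upper()  # both joins must stay equality joins
```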
add test case with multiple partitions for message correlation
|
["diff --git a/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java b/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\nindex 693d1da..e3552d4 100644\n--- a/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\n+++ b/broker-core/src/main/java/io/zeebe/broker/subscription/command/SubscriptionCommandSender.java\n@@ -74,7 +74,7 @@ public class SubscriptionCommandSender {\n new CloseWorkflowInstanceSubscriptionCommand();\n \n private final ClientTransport subscriptionClient;\n- private final IntArrayList partitionIds;\n+ private final IntArrayList partitionIds = new IntArrayList();\n \n private int partitionId;\n private TopologyPartitionListenerImpl partitionListener;\n@@ -82,7 +82,6 @@ public class SubscriptionCommandSender {\n public SubscriptionCommandSender(\n final ClusterCfg clusterCfg, final ClientTransport subscriptionClient) {\n this.subscriptionClient = subscriptionClient;\n- partitionIds = new IntArrayList();\n partitionIds.addAll(clusterCfg.getPartitionIds());\n }\n \n@@ -100,7 +99,8 @@ public class SubscriptionCommandSender {\n final DirectBuffer messageName,\n final DirectBuffer correlationKey) {\n \n- final int subscriptionPartitionId = getSubscriptionPartitionId(correlationKey);\n+ final int subscriptionPartitionId =\n+ SubscriptionUtil.getSubscriptionPartitionId(correlationKey, partitionIds.size());\n \n openMessageSubscriptionCommand.setSubscriptionPartitionId(subscriptionPartitionId);\n openMessageSubscriptionCommand.setWorkflowInstanceKey(workflowInstanceKey);\n@@ -111,14 +111,6 @@ public class SubscriptionCommandSender {\n return sendSubscriptionCommand(subscriptionPartitionId, openMessageSubscriptionCommand);\n }\n \n- private int getSubscriptionPartitionId(final DirectBuffer correlationKey) {\n- if (partitionIds == null) {\n- throw new IllegalStateException(\"no partition ids available\");\n- }\n-\n- return SubscriptionUtil.getSubscriptionPartitionId(correlationKey, partitionIds.size());\n- }\n-\n public boolean openWorkflowInstanceSubscription(\n final long workflowInstanceKey,\n final long elementInstanceKey,\ndiff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\nindex 4baed4f..838c9ca 100644\n--- a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCatchElementTest.java\n@@ -36,7 +36,6 @@ import io.zeebe.model.bpmn.Bpmn;\n import io.zeebe.model.bpmn.BpmnModelInstance;\n import io.zeebe.protocol.clientapi.RecordType;\n import io.zeebe.protocol.clientapi.ValueType;\n-import io.zeebe.protocol.impl.SubscriptionUtil;\n import io.zeebe.protocol.intent.DeploymentIntent;\n import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n@@ -44,7 +43,6 @@ import io.zeebe.protocol.intent.WorkflowInstanceSubscriptionIntent;\n import io.zeebe.test.broker.protocol.clientapi.ClientApiRule;\n import io.zeebe.test.broker.protocol.clientapi.PartitionTestClient;\n import io.zeebe.test.util.record.RecordingExporter;\n-import io.zeebe.util.buffer.BufferUtil;\n import java.util.List;\n import java.util.stream.Collectors;\n import org.agrona.DirectBuffer;\n@@ -171,39 +169,6 @@ public class MessageCatchElementTest {\n }\n \n @Test\n- public void 
shouldOpenMessageSubscriptionsOnSamePartition() {\n- // given\n- final List<Integer> partitionIds = apiRule.getPartitionIds();\n-\n- final String correlationKey = \"order-123\";\n-\n- final PartitionTestClient workflowPartition = apiRule.partitionClient(partitionIds.get(0));\n- final PartitionTestClient subscriptionPartition =\n- apiRule.partitionClient(getPartitionId(correlationKey));\n-\n- testClient.deploy(CATCH_EVENT_WORKFLOW);\n-\n- // when\n- final long workflowInstanceKey1 =\n- workflowPartition.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", correlationKey));\n-\n- final long workflowInstanceKey2 =\n- workflowPartition.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", correlationKey));\n-\n- // then\n- final List<Record<MessageSubscriptionRecordValue>> subscriptions =\n- subscriptionPartition\n- .receiveMessageSubscriptions()\n- .withIntent(MessageSubscriptionIntent.OPENED)\n- .limit(2)\n- .collect(Collectors.toList());\n-\n- assertThat(subscriptions)\n- .extracting(s -> s.getValue().getWorkflowInstanceKey())\n- .contains(workflowInstanceKey1, workflowInstanceKey2);\n- }\n-\n- @Test\n public void shouldOpenWorkflowInstanceSubscription() {\n final long workflowInstanceKey =\n testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"orderId\", \"order-123\"));\n@@ -352,10 +317,4 @@ public class MessageCatchElementTest {\n .exists())\n .isTrue();\n }\n-\n- private int getPartitionId(final String correlationKey) {\n- final List<Integer> partitionIds = apiRule.getPartitionIds();\n- return SubscriptionUtil.getSubscriptionPartitionId(\n- BufferUtil.wrapString(correlationKey), partitionIds.size());\n- }\n }\ndiff --git a/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java\nnew file mode 100644\nindex 0000000..cf8261a\n--- /dev/null\n+++ b/broker-core/src/test/java/io/zeebe/broker/workflow/message/MessageCorrelationMultiplePartitionsTest.java\n@@ -0,0 +1,134 @@\n+/*\n+ * Zeebe Broker Core\n+ * Copyright \u00a9 2017 camunda services GmbH (info@camunda.com)\n+ *\n+ * This program is free software: you can redistribute it and/or modify\n+ * it under the terms of the GNU Affero General Public License as published by\n+ * the Free Software Foundation, either version 3 of the License, or\n+ * (at your option) any later version.\n+ *\n+ * This program is distributed in the hope that it will be useful,\n+ * but WITHOUT ANY WARRANTY; without even the implied warranty of\n+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n+ * GNU Affero General Public License for more details.\n+ *\n+ * You should have received a copy of the GNU Affero General Public License\n+ * along with this program. 
If not, see <http://www.gnu.org/licenses/>.\n+ */\n+package io.zeebe.broker.workflow.message;\n+\n+import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n+import static io.zeebe.test.util.MsgPackUtil.asMsgPack;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.tuple;\n+\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import io.zeebe.protocol.impl.SubscriptionUtil;\n+import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n+import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n+import io.zeebe.test.broker.protocol.clientapi.ClientApiRule;\n+import io.zeebe.test.broker.protocol.clientapi.PartitionTestClient;\n+import io.zeebe.test.util.record.RecordingExporter;\n+import io.zeebe.util.buffer.BufferUtil;\n+import java.util.List;\n+import java.util.stream.IntStream;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationMultiplePartitionsTest {\n+\n+ private static final String CORRELATION_KEY_PARTITION_0 = \"item-2\";\n+ private static final String CORRELATION_KEY_PARTITION_1 = \"item-1\";\n+ private static final String CORRELATION_KEY_PARTITION_2 = \"item-0\";\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(\"receive-message\")\n+ .message(m -> m.name(\"message\").zeebeCorrelationKey(\"$.key\"))\n+ .endEvent(\"end\")\n+ .done();\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n+\n+ public ClientApiRule apiRule = new ClientApiRule(brokerRule::getClientAddress);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(apiRule);\n+\n+ private PartitionTestClient testClient;\n+\n+ @Before\n+ public void init() {\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_0)).isEqualTo(0);\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_1)).isEqualTo(1);\n+ assertThat(getPartitionId(CORRELATION_KEY_PARTITION_2)).isEqualTo(2);\n+\n+ testClient = apiRule.partitionClient();\n+\n+ testClient.deploy(WORKFLOW);\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_0));\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_1));\n+ testClient.createWorkflowInstance(\n+ PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageOnDifferentPartitions() {\n+ // given\n+ apiRule\n+ .partitionClient(0)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_0, asMsgPack(\"p\", \"p0\"));\n+ apiRule\n+ .partitionClient(1)\n+ .publishMessage(\"message\", CORRELATION_KEY_PARTITION_1, asMsgPack(\"p\", \"p1\"));\n+ apiRule\n+ .partitionClient(2)\n+ 
.publishMessage(\"message\", CORRELATION_KEY_PARTITION_2, asMsgPack(\"p\", \"p2\"));\n+\n+ // when\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_0));\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_1));\n+ testClient.createWorkflowInstance(PROCESS_ID, asMsgPack(\"key\", CORRELATION_KEY_PARTITION_2));\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.workflowInstanceRecords(WorkflowInstanceIntent.END_EVENT_OCCURRED)\n+ .withElementId(\"end\")\n+ .limit(3))\n+ .extracting(r -> r.getValue().getPayloadAsMap().get(\"p\"))\n+ .contains(\"p0\", \"p1\", \"p2\");\n+ }\n+\n+ private int getPartitionId(final String correlationKey) {\n+ final List<Integer> partitionIds = apiRule.getPartitionIds();\n+ return SubscriptionUtil.getSubscriptionPartitionId(\n+ BufferUtil.wrapString(correlationKey), partitionIds.size());\n+ }\n+}\ndiff --git a/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java b/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\nindex dac11a2..e2b8397 100644\n--- a/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\n+++ b/protocol-test-util/src/main/java/io/zeebe/test/broker/protocol/clientapi/PartitionTestClient.java\n@@ -329,6 +329,7 @@ public class PartitionTestClient {\n final String messageName, final String correlationKey, final byte[] payload, final long ttl) {\n return apiRule\n .createCmdRequest()\n+ .partitionId(partitionId)\n .type(ValueType.MESSAGE, MessageIntent.PUBLISH)\n .command()\n .put(\"name\", messageName)\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\nindex 9a122d9..b7db67e 100644\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/startup/BrokerReprocessingTest.java\n@@ -619,14 +619,9 @@ public class BrokerReprocessingTest {\n }\n \n @Test\n- public void shouldCorrelateMessageAfterRestartIfEnteredBeforeA() throws Exception {\n+ public void shouldCorrelateMessageAfterRestartIfEnteredBefore() throws Exception {\n // given\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(WORKFLOW_MESSAGE, \"message.bpmn\")\n- .send()\n- .join();\n+ deploy(WORKFLOW_MESSAGE, \"message.bpmn\");\n \n final long workflowInstanceKey =\n startWorkflowInstance(PROCESS_ID, singletonMap(\"orderId\", \"order-123\"))\n@@ -658,12 +653,7 @@ public class BrokerReprocessingTest {\n @Test\n public void shouldCorrelateMessageAfterRestartIfPublishedBefore() throws Exception {\n // given\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(WORKFLOW_MESSAGE, \"message.bpmn\")\n- .send()\n- .join();\n+ deploy(WORKFLOW_MESSAGE, \"message.bpmn\");\n \n publishMessage(\"order canceled\", \"order-123\", singletonMap(\"foo\", \"bar\"));\n reprocessingTrigger.accept(this);\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java\ndeleted file mode 100644\nindex c6a05fb..0000000\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/MessageCorrelationTest.java\n+++ /dev/null\n@@ -1,176 +0,0 @@\n-/*\n- * Copyright \u00a9 
2017 camunda services GmbH (info@camunda.com)\n- *\n- * Licensed under the Apache License, Version 2.0 (the \"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations under the License.\n- */\n-package io.zeebe.broker.it.workflow;\n-\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n-import static org.assertj.core.api.Assertions.assertThat;\n-import static org.assertj.core.api.Assertions.entry;\n-\n-import io.zeebe.broker.it.GrpcClientRule;\n-import io.zeebe.broker.test.EmbeddedBrokerRule;\n-import io.zeebe.client.api.events.DeploymentEvent;\n-import io.zeebe.model.bpmn.Bpmn;\n-import io.zeebe.model.bpmn.BpmnModelInstance;\n-import java.util.Collections;\n-import org.junit.Before;\n-import org.junit.Rule;\n-import org.junit.Test;\n-import org.junit.rules.RuleChain;\n-import org.junit.runner.RunWith;\n-import org.junit.runners.Parameterized;\n-import org.junit.runners.Parameterized.Parameter;\n-import org.junit.runners.Parameterized.Parameters;\n-\n-@RunWith(Parameterized.class)\n-public class MessageCorrelationTest {\n-\n- public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule();\n- public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n-\n- @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n-\n- private static final BpmnModelInstance CATCH_EVENT_WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .intermediateCatchEvent(\"receive-message\")\n- .message(m -> m.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .sequenceFlowId(\"to-end\")\n- .endEvent()\n- .done();\n-\n- private static final BpmnModelInstance RECEIVE_TASK_WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .receiveTask(\"receive-message\")\n- .message(m -> m.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .sequenceFlowId(\"to-end\")\n- .endEvent()\n- .done();\n-\n- @Parameter(0)\n- public String elementType;\n-\n- @Parameter(1)\n- public BpmnModelInstance workflow;\n-\n- @Parameters(name = \"{0}\")\n- public static final Object[][] parameters() {\n- return new Object[][] {\n- {\"intermediate message catch event\", CATCH_EVENT_WORKFLOW},\n- {\"receive task\", RECEIVE_TASK_WORKFLOW}\n- };\n- }\n-\n- @Before\n- public void init() {\n- final DeploymentEvent deploymentEvent =\n- clientRule\n- .getWorkflowClient()\n- .newDeployCommand()\n- .addWorkflowModel(workflow, \"wf.bpmn\")\n- .send()\n- .join();\n-\n- clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageIfEnteredBefore() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- assertElementActivated(\"receive-message\");\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- 
.newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n- }\n-\n- @Test\n- public void shouldCorrelateMessageIfPublishedBefore() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n- }\n-\n- @Test\n- public void shouldCorrelateMessageAndMergePayload() {\n- // given\n- clientRule\n- .getWorkflowClient()\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // when\n- clientRule\n- .getWorkflowClient()\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .payload(Collections.singletonMap(\"foo\", \"bar\"))\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\");\n-\n- assertElementCompleted(\n- \"wf\",\n- \"receive-message\",\n- (catchEventOccurredEvent) ->\n- assertThat(catchEventOccurredEvent.getPayloadAsMap())\n- .containsExactly(entry(\"orderId\", \"order-123\"), entry(\"foo\", \"bar\")));\n- }\n-}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java\ndeleted file mode 100644\nindex 7845eec..0000000\n--- a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/PublishMessageTest.java\n+++ /dev/null\n@@ -1,234 +0,0 @@\n-/*\n- * Copyright \u00a9 2017 camunda services GmbH (info@camunda.com)\n- *\n- * Licensed under the Apache License, Version 2.0 (the \"License\");\n- * you may not use this file except in compliance with the License.\n- * You may obtain a copy of the License at\n- *\n- * http://www.apache.org/licenses/LICENSE-2.0\n- *\n- * Unless required by applicable law or agreed to in writing, software\n- * distributed under the License is distributed on an \"AS IS\" BASIS,\n- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n- * See the License for the specific language governing permissions and\n- * limitations under the License.\n- */\n-package io.zeebe.broker.it.workflow;\n-\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n-import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n-import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n-import static org.assertj.core.api.Assertions.assertThat;\n-import static org.assertj.core.api.Assertions.assertThatThrownBy;\n-import static org.assertj.core.api.Assertions.entry;\n-\n-import io.zeebe.broker.it.GrpcClientRule;\n-import io.zeebe.broker.test.EmbeddedBrokerRule;\n-import io.zeebe.client.api.ZeebeFuture;\n-import io.zeebe.client.api.clients.WorkflowClient;\n-import io.zeebe.client.api.events.DeploymentEvent;\n-import io.zeebe.client.api.events.WorkflowInstanceEvent;\n-import io.zeebe.client.cmd.ClientException;\n-import io.zeebe.model.bpmn.Bpmn;\n-import io.zeebe.model.bpmn.BpmnModelInstance;\n-import 
java.time.Duration;\n-import java.util.Collections;\n-import org.junit.Before;\n-import org.junit.Rule;\n-import org.junit.Test;\n-import org.junit.rules.RuleChain;\n-\n-public class PublishMessageTest {\n-\n- private static final BpmnModelInstance WORKFLOW =\n- Bpmn.createExecutableProcess(\"wf\")\n- .startEvent()\n- .intermediateCatchEvent(\"catch-event\")\n- .message(c -> c.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n- .endEvent()\n- .done();\n- public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n- public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n-\n- @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n-\n- private WorkflowClient workflowClient;\n-\n- @Before\n- public void init() {\n-\n- workflowClient = clientRule.getClient().workflowClient();\n-\n- final DeploymentEvent deploymentEvent =\n- workflowClient.newDeployCommand().addWorkflowModel(WORKFLOW, \"wf.bpmn\").send().join();\n-\n- clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageToAllSubscriptions() {\n- // given\n- final WorkflowInstanceEvent wf =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- final WorkflowInstanceEvent wf2 =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // when\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\", wf.getWorkflowInstanceKey());\n- assertWorkflowInstanceCompleted(\"wf\", wf2.getWorkflowInstanceKey());\n- }\n-\n- @Test\n- public void shouldCorrelateMessageWithZeroTTL() {\n- // given\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- assertElementActivated(\"catch-event\");\n-\n- // when\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ZERO)\n- .send()\n- .join();\n-\n- // then\n- assertElementCompleted(\"wf\", \"catch-event\");\n- }\n-\n- @Test\n- public void shouldNotCorrelateMessageAfterTTL() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ZERO)\n- .payload(Collections.singletonMap(\"msg\", \"failure\"))\n- .send()\n- .join();\n-\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .timeToLive(Duration.ofMinutes(1))\n- .payload(Collections.singletonMap(\"msg\", \"expected\"))\n- .send()\n- .join();\n-\n- // when\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- // then\n-\n- assertElementCompleted(\n- \"wf\",\n- \"catch-event\",\n- (catchEventOccurred) ->\n- assertThat(catchEventOccurred.getPayloadAsMap()).contains(entry(\"msg\", \"expected\")));\n- }\n-\n- @Test\n- public void shouldCorrelateMessageOnDifferentPartitions() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order 
canceled\")\n- .correlationKey(\"order-123\")\n- .send()\n- .join();\n-\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-124\")\n- .send()\n- .join();\n-\n- // when\n- final WorkflowInstanceEvent wf =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-123\\\"}\")\n- .send()\n- .join();\n-\n- final WorkflowInstanceEvent wf2 =\n- workflowClient\n- .newCreateInstanceCommand()\n- .bpmnProcessId(\"wf\")\n- .latestVersion()\n- .payload(\"{\\\"orderId\\\":\\\"order-124\\\"}\")\n- .send()\n- .join();\n-\n- // then\n- assertWorkflowInstanceCompleted(\"wf\", wf.getWorkflowInstanceKey());\n- assertWorkflowInstanceCompleted(\"wf\", wf2.getWorkflowInstanceKey());\n- }\n-\n- @Test\n- public void shouldRejectMessageWithSameId() {\n- // given\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .messageId(\"foo\")\n- .send()\n- .join();\n-\n- // when\n- final ZeebeFuture<Void> future =\n- workflowClient\n- .newPublishMessageCommand()\n- .messageName(\"order canceled\")\n- .correlationKey(\"order-123\")\n- .messageId(\"foo\")\n- .send();\n-\n- // then\n- assertThatThrownBy(future::join)\n- .isInstanceOf(ClientException.class)\n- .hasMessageContaining(\"message with id 'foo' is already published\");\n- }\n-}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java\nnew file mode 100644\nindex 0000000..0e37c95\n--- /dev/null\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationMultiplePartitionsTest.java\n@@ -0,0 +1,196 @@\n+/*\n+ * Copyright \u00a9 2017 camunda services GmbH (info@camunda.com)\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.zeebe.broker.it.workflow.message;\n+\n+import static io.zeebe.broker.test.EmbeddedBrokerConfigurator.setPartitionCount;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.tuple;\n+\n+import io.zeebe.broker.it.GrpcClientRule;\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.client.api.events.DeploymentEvent;\n+import io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import io.zeebe.protocol.intent.MessageIntent;\n+import io.zeebe.protocol.intent.MessageSubscriptionIntent;\n+import io.zeebe.protocol.intent.WorkflowInstanceIntent;\n+import io.zeebe.test.util.record.RecordingExporter;\n+import java.util.Collections;\n+import java.util.stream.IntStream;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationMultiplePartitionsTest {\n+\n+ private static final String CORRELATION_KEY_PARTITION_0 = 
\"item-2\";\n+ private static final String CORRELATION_KEY_PARTITION_1 = \"item-1\";\n+ private static final String CORRELATION_KEY_PARTITION_2 = \"item-0\";\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule(setPartitionCount(3));\n+ public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent()\n+ .message(m -> m.name(\"message\").zeebeCorrelationKey(\"$.key\"))\n+ .endEvent(\"end\")\n+ .done();\n+\n+ @Before\n+ public void init() {\n+ final DeploymentEvent deploymentEvent =\n+ clientRule\n+ .getWorkflowClient()\n+ .newDeployCommand()\n+ .addWorkflowModel(WORKFLOW, \"wf.bpmn\")\n+ .send()\n+ .join();\n+\n+ clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldPublishMessageOnDifferentPartitions() {\n+ // when\n+ IntStream.range(0, 10)\n+ .forEach(\n+ i -> {\n+ publishMessage(CORRELATION_KEY_PARTITION_0, Collections.singletonMap(\"p\", \"p0\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_1, Collections.singletonMap(\"p\", \"p1\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_2, Collections.singletonMap(\"p\", \"p2\"));\n+ });\n+\n+ // then\n+ assertThat(RecordingExporter.messageRecords(MessageIntent.PUBLISHED).limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageOnDifferentPartitions() {\n+ // given\n+ publishMessage(CORRELATION_KEY_PARTITION_0, Collections.singletonMap(\"p\", \"p0\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_1, Collections.singletonMap(\"p\", \"p1\"));\n+ publishMessage(CORRELATION_KEY_PARTITION_2, Collections.singletonMap(\"p\", \"p2\"));\n+\n+ // when\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.workflowInstanceRecords(WorkflowInstanceIntent.END_EVENT_OCCURRED)\n+ .withElementId(\"end\")\n+ .limit(3))\n+ .extracting(r -> r.getValue().getPayloadAsMap().get(\"p\"))\n+ .contains(\"p0\", \"p1\", \"p2\");\n+ }\n+\n+ @Test\n+ public void shouldOpenMessageSubscriptionsOnSamePartitionsAfterRestart() {\n+ // 
given\n+ IntStream.range(0, 5)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(15)\n+ .exists())\n+ .isTrue();\n+\n+ // when\n+ brokerRule.stopBroker();\n+ brokerRule.startBroker();\n+\n+ IntStream.range(0, 5)\n+ .forEach(\n+ i -> {\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_0));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_1));\n+ createWorkflowInstance(Collections.singletonMap(\"key\", CORRELATION_KEY_PARTITION_2));\n+ });\n+\n+ // then\n+ assertThat(\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .limit(30))\n+ .extracting(r -> tuple(r.getMetadata().getPartitionId(), r.getValue().getCorrelationKey()))\n+ .containsOnly(\n+ tuple(0, CORRELATION_KEY_PARTITION_0),\n+ tuple(1, CORRELATION_KEY_PARTITION_1),\n+ tuple(2, CORRELATION_KEY_PARTITION_2));\n+ }\n+\n+ private void createWorkflowInstance(Object payload) {\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(payload)\n+ .send()\n+ .join();\n+ }\n+\n+ private void publishMessage(String correlationKey, Object payload) {\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"message\")\n+ .correlationKey(correlationKey)\n+ .payload(payload)\n+ .send()\n+ .join();\n+ }\n+}\ndiff --git a/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java\nnew file mode 100644\nindex 0000000..3b08572\n--- /dev/null\n+++ b/qa/integration-tests/src/test/java/io/zeebe/broker/it/workflow/message/MessageCorrelationTest.java\n@@ -0,0 +1,198 @@\n+/*\n+ * Copyright \u00a9 2017 camunda services GmbH (info@camunda.com)\n+ *\n+ * Licensed under the Apache License, Version 2.0 (the \"License\");\n+ * you may not use this file except in compliance with the License.\n+ * You may obtain a copy of the License at\n+ *\n+ * http://www.apache.org/licenses/LICENSE-2.0\n+ *\n+ * Unless required by applicable law or agreed to in writing, software\n+ * distributed under the License is distributed on an \"AS IS\" BASIS,\n+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n+ * See the License for the specific language governing permissions and\n+ * limitations under the License.\n+ */\n+package io.zeebe.broker.it.workflow.message;\n+\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementActivated;\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertElementCompleted;\n+import static io.zeebe.broker.it.util.ZeebeAssertHelper.assertWorkflowInstanceCompleted;\n+import static org.assertj.core.api.Assertions.assertThat;\n+import static org.assertj.core.api.Assertions.assertThatThrownBy;\n+import static org.assertj.core.api.Assertions.entry;\n+\n+import io.zeebe.broker.it.GrpcClientRule;\n+import io.zeebe.broker.test.EmbeddedBrokerRule;\n+import io.zeebe.client.api.ZeebeFuture;\n+import io.zeebe.client.api.events.DeploymentEvent;\n+import io.zeebe.client.cmd.ClientException;\n+import 
io.zeebe.model.bpmn.Bpmn;\n+import io.zeebe.model.bpmn.BpmnModelInstance;\n+import java.time.Duration;\n+import java.util.Collections;\n+import org.junit.Before;\n+import org.junit.Rule;\n+import org.junit.Test;\n+import org.junit.rules.RuleChain;\n+\n+public class MessageCorrelationTest {\n+\n+ private static final String PROCESS_ID = \"process\";\n+\n+ public EmbeddedBrokerRule brokerRule = new EmbeddedBrokerRule();\n+ public GrpcClientRule clientRule = new GrpcClientRule(brokerRule);\n+\n+ @Rule public RuleChain ruleChain = RuleChain.outerRule(brokerRule).around(clientRule);\n+\n+ private static final BpmnModelInstance WORKFLOW =\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .intermediateCatchEvent(\"catch-event\")\n+ .message(c -> c.name(\"order canceled\").zeebeCorrelationKey(\"$.orderId\"))\n+ .endEvent()\n+ .done();\n+\n+ @Before\n+ public void init() {\n+ final DeploymentEvent deploymentEvent =\n+ clientRule\n+ .getWorkflowClient()\n+ .newDeployCommand()\n+ .addWorkflowModel(WORKFLOW, \"wf.bpmn\")\n+ .send()\n+ .join();\n+\n+ clientRule.waitUntilDeploymentIsDone(deploymentEvent.getKey());\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessage() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .payload(Collections.singletonMap(\"foo\", \"bar\"))\n+ .send()\n+ .join();\n+\n+ // then\n+ assertWorkflowInstanceCompleted(PROCESS_ID);\n+\n+ assertElementCompleted(\n+ PROCESS_ID,\n+ \"catch-event\",\n+ (catchEventOccurredEvent) ->\n+ assertThat(catchEventOccurredEvent.getPayloadAsMap())\n+ .containsExactly(entry(\"orderId\", \"order-123\"), entry(\"foo\", \"bar\")));\n+ }\n+\n+ @Test\n+ public void shouldCorrelateMessageWithZeroTTL() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ assertElementActivated(\"catch-event\");\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ZERO)\n+ .send()\n+ .join();\n+\n+ // then\n+ assertElementCompleted(PROCESS_ID, \"catch-event\");\n+ }\n+\n+ @Test\n+ public void shouldNotCorrelateMessageAfterTTL() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ZERO)\n+ .payload(Collections.singletonMap(\"msg\", \"failure\"))\n+ .send()\n+ .join();\n+\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .timeToLive(Duration.ofMinutes(1))\n+ .payload(Collections.singletonMap(\"msg\", \"expected\"))\n+ .send()\n+ .join();\n+\n+ // when\n+ clientRule\n+ .getWorkflowClient()\n+ .newCreateInstanceCommand()\n+ .bpmnProcessId(PROCESS_ID)\n+ .latestVersion()\n+ .payload(Collections.singletonMap(\"orderId\", \"order-123\"))\n+ .send()\n+ .join();\n+\n+ // then\n+ assertElementCompleted(\n+ PROCESS_ID,\n+ \"catch-event\",\n+ (catchEventOccurred) ->\n+ assertThat(catchEventOccurred.getPayloadAsMap()).contains(entry(\"msg\", 
\"expected\")));\n+ }\n+\n+ @Test\n+ public void shouldRejectMessageWithSameId() {\n+ // given\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .messageId(\"foo\")\n+ .send()\n+ .join();\n+\n+ // when\n+ final ZeebeFuture<Void> future =\n+ clientRule\n+ .getWorkflowClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"order canceled\")\n+ .correlationKey(\"order-123\")\n+ .messageId(\"foo\")\n+ .send();\n+\n+ // then\n+ assertThatThrownBy(future::join)\n+ .isInstanceOf(ClientException.class)\n+ .hasMessageContaining(\"message with id 'foo' is already published\");\n+ }\n+}\n"]
| 1 |
["2d416be63eeec9e7fdb90a62c40c8ad8f0672efa"]
|
["test"]
|
note about archive without license and readme
closes #602
|
["diff --git a/docs/060-archive.md b/docs/060-archive.md\nindex e5d7cfb..bf0d569 100644\n--- a/docs/060-archive.md\n+++ b/docs/060-archive.md\n@@ -85,3 +85,24 @@ Then you can run:\n ```console\n GOVERSION_NR=$(go version | awk '{print $3;}') goreleaser\n ```\n+\n+## Packaging only the binaries\n+\n+Since GoReleaser will always add the `README` and `LICENSE` files to the\n+archive if the file list is empty, you'll need to provide a filled `files`\n+on the archive section.\n+\n+A working hack is to use something like this:\n+\n+```yaml\n+# goreleaser.yml\n+archive:\n+ files:\n+ - none*\n+```\n+\n+This would add all files matching the glob `none*`, provide that you don't\n+have any files matching that glob, only the binary will be added to the\n+archive.\n+\n+For more information, check [#602](https://github.com/goreleaser/goreleaser/issues/602)\n"]
| 1 |
["7648760108613f771a5e6e40bb87a8f1fcee21ad"]
|
["docs"]
|
dedup redundant imports
|
["diff --git a/ibis/backends/base/__init__.py b/ibis/backends/base/__init__.py\nindex effd44c..a59c0ec 100644\n--- a/ibis/backends/base/__init__.py\n+++ b/ibis/backends/base/__init__.py\n@@ -31,7 +31,7 @@ import ibis.common.exceptions as exc\n import ibis.config\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n \n __all__ = ('BaseBackend', 'Database', 'connect')\n \ndiff --git a/ibis/backends/base/sql/__init__.py b/ibis/backends/base/sql/__init__.py\nindex e4f2129..7bbdaf9 100644\n--- a/ibis/backends/base/sql/__init__.py\n+++ b/ibis/backends/base/sql/__init__.py\n@@ -12,7 +12,7 @@ import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import BaseBackend\n from ibis.backends.base.sql.compiler import Compiler\n \ndiff --git a/ibis/backends/base/sql/alchemy/__init__.py b/ibis/backends/base/sql/alchemy/__init__.py\nindex 71cc0e8..ab89d7d 100644\n--- a/ibis/backends/base/sql/alchemy/__init__.py\n+++ b/ibis/backends/base/sql/alchemy/__init__.py\n@@ -11,7 +11,7 @@ import ibis\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.alchemy.database import AlchemyDatabase, AlchemyTable\n from ibis.backends.base.sql.alchemy.datatypes import (\ndiff --git a/ibis/backends/base/sql/alchemy/query_builder.py b/ibis/backends/base/sql/alchemy/query_builder.py\nindex 54c74ba..0ec432f 100644\n--- a/ibis/backends/base/sql/alchemy/query_builder.py\n+++ b/ibis/backends/base/sql/alchemy/query_builder.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import functools\n \n import sqlalchemy as sa\n-import sqlalchemy.sql as sql\n+from sqlalchemy import sql\n \n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/base/sql/compiler/base.py b/ibis/backends/base/sql/compiler/base.py\nindex 84102aa..fb44667 100644\n--- a/ibis/backends/base/sql/compiler/base.py\n+++ b/ibis/backends/base/sql/compiler/base.py\n@@ -7,7 +7,7 @@ import toolz\n \n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n class DML(abc.ABC):\ndiff --git a/ibis/backends/base/sql/compiler/query_builder.py b/ibis/backends/base/sql/compiler/query_builder.py\nindex a2d5214..95f5e8d 100644\n--- a/ibis/backends/base/sql/compiler/query_builder.py\n+++ b/ibis/backends/base/sql/compiler/query_builder.py\n@@ -8,7 +8,7 @@ import toolz\n import ibis.common.exceptions as com\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.compiler.base import DML, QueryAST, SetOp\n from ibis.backends.base.sql.compiler.select_builder import SelectBuilder, _LimitSpec\n from ibis.backends.base.sql.compiler.translator import ExprTranslator, QueryContext\ndiff --git a/ibis/backends/base/sql/registry/main.py b/ibis/backends/base/sql/registry/main.py\nindex 77f70a5..586ace5 100644\n--- a/ibis/backends/base/sql/registry/main.py\n+++ b/ibis/backends/base/sql/registry/main.py\n@@ -4,7 +4,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis 
import util\n from ibis.backends.base.sql.registry import (\n aggregate,\n binary_infix,\ndiff --git a/ibis/backends/base/sql/registry/timestamp.py b/ibis/backends/base/sql/registry/timestamp.py\nindex 412eab1..3c8571f 100644\n--- a/ibis/backends/base/sql/registry/timestamp.py\n+++ b/ibis/backends/base/sql/registry/timestamp.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n-import ibis.util as util\n+from ibis import util\n \n \n def extract_field(sql_attr):\ndiff --git a/ibis/backends/clickhouse/tests/test_client.py b/ibis/backends/clickhouse/tests/test_client.py\nindex 8db6672..bb1b9ba 100644\n--- a/ibis/backends/clickhouse/tests/test_client.py\n+++ b/ibis/backends/clickhouse/tests/test_client.py\n@@ -3,9 +3,9 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.backends.clickhouse.tests.conftest import (\n CLICKHOUSE_HOST,\n CLICKHOUSE_PASS,\ndiff --git a/ibis/backends/conftest.py b/ibis/backends/conftest.py\nindex 3a974da..ba7ad75 100644\n--- a/ibis/backends/conftest.py\n+++ b/ibis/backends/conftest.py\n@@ -20,7 +20,7 @@ if TYPE_CHECKING:\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import _get_backend_names\n \n TEST_TABLES = {\ndiff --git a/ibis/backends/dask/execution/util.py b/ibis/backends/dask/execution/util.py\nindex 61bff7e..7ed0c10 100644\n--- a/ibis/backends/dask/execution/util.py\n+++ b/ibis/backends/dask/execution/util.py\n@@ -9,13 +9,13 @@ import pandas as pd\n from dask.dataframe.groupby import SeriesGroupBy\n \n import ibis.backends.pandas.execution.util as pd_util\n-import ibis.common.graph as graph\n import ibis.expr.analysis as an\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n import ibis.util\n from ibis.backends.dask.core import execute\n from ibis.backends.pandas.trace import TraceTwoLevelDispatcher\n+from ibis.common import graph\n from ibis.expr.scope import Scope\n \n if TYPE_CHECKING:\ndiff --git a/ibis/backends/duckdb/datatypes.py b/ibis/backends/duckdb/datatypes.py\nindex fd6b8f5..52c0719 100644\n--- a/ibis/backends/duckdb/datatypes.py\n+++ b/ibis/backends/duckdb/datatypes.py\n@@ -3,7 +3,7 @@ from __future__ import annotations\n import parsy as p\n import toolz\n \n-import ibis.util as util\n+from ibis import util\n from ibis.common.parsing import (\n COMMA,\n FIELD,\ndiff --git a/ibis/backends/impala/__init__.py b/ibis/backends/impala/__init__.py\nindex 4ad2057..8299a28 100644\n--- a/ibis/backends/impala/__init__.py\n+++ b/ibis/backends/impala/__init__.py\n@@ -20,7 +20,7 @@ import ibis.config\n import ibis.expr.datatypes as dt\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.ddl import (\n CTAS,\ndiff --git a/ibis/backends/impala/client.py b/ibis/backends/impala/client.py\nindex 6655ce7..78d526f 100644\n--- a/ibis/backends/impala/client.py\n+++ b/ibis/backends/impala/client.py\n@@ -10,7 +10,7 @@ import sqlalchemy as sa\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base import Database\n from ibis.backends.base.sql.compiler import DDL, DML\n 
from ibis.backends.base.sql.ddl import (\ndiff --git a/ibis/backends/impala/pandas_interop.py b/ibis/backends/impala/pandas_interop.py\nindex f410a8b..e687884 100644\n--- a/ibis/backends/impala/pandas_interop.py\n+++ b/ibis/backends/impala/pandas_interop.py\n@@ -22,7 +22,7 @@ from posixpath import join as pjoin\n import ibis.backends.pandas.client # noqa: F401\n import ibis.common.exceptions as com\n import ibis.expr.schema as sch\n-import ibis.util as util\n+from ibis import util\n from ibis.config import options\n \n \ndiff --git a/ibis/backends/impala/tests/conftest.py b/ibis/backends/impala/tests/conftest.py\nindex 1075ebe..a815be5 100644\n--- a/ibis/backends/impala/tests/conftest.py\n+++ b/ibis/backends/impala/tests/conftest.py\n@@ -13,8 +13,7 @@ import pytest\n \n import ibis\n import ibis.expr.types as ir\n-import ibis.util as util\n-from ibis import options\n+from ibis import options, util\n from ibis.backends.base import BaseBackend\n from ibis.backends.conftest import TEST_TABLES, _random_identifier\n from ibis.backends.impala.compiler import ImpalaCompiler, ImpalaExprTranslator\ndiff --git a/ibis/backends/impala/tests/test_client.py b/ibis/backends/impala/tests/test_client.py\nindex 0b56054..3fcca3a 100644\n--- a/ibis/backends/impala/tests/test_client.py\n+++ b/ibis/backends/impala/tests/test_client.py\n@@ -7,9 +7,9 @@ import pytz\n \n import ibis\n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_ddl.py b/ibis/backends/impala/tests/test_ddl.py\nindex 870c4dc..2346a3d 100644\n--- a/ibis/backends/impala/tests/test_ddl.py\n+++ b/ibis/backends/impala/tests/test_ddl.py\n@@ -6,7 +6,7 @@ import ibis\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.ddl import fully_qualified_re\n from ibis.tests.util import assert_equal\n \ndiff --git a/ibis/backends/impala/tests/test_exprs.py b/ibis/backends/impala/tests/test_exprs.py\nindex cfc8552..1d6f44f 100644\n--- a/ibis/backends/impala/tests/test_exprs.py\n+++ b/ibis/backends/impala/tests/test_exprs.py\n@@ -5,10 +5,10 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.types as ir\n from ibis import literal as L\n from ibis.backends.impala.compiler import ImpalaCompiler\n+from ibis.expr import api\n from ibis.expr.datatypes import Category\n \n \ndiff --git a/ibis/backends/impala/tests/test_partition.py b/ibis/backends/impala/tests/test_partition.py\nindex 1f96e7d..44217a4 100644\n--- a/ibis/backends/impala/tests/test_partition.py\n+++ b/ibis/backends/impala/tests/test_partition.py\n@@ -6,7 +6,7 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pytest.importorskip(\"impala\")\ndiff --git a/ibis/backends/impala/tests/test_udf.py b/ibis/backends/impala/tests/test_udf.py\nindex 895918b..fd950d5 100644\n--- a/ibis/backends/impala/tests/test_udf.py\n+++ b/ibis/backends/impala/tests/test_udf.py\n@@ -9,11 +9,11 @@ import ibis\n import ibis.backends.impala as api\n import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n-import ibis.expr.rules as rules\n import ibis.expr.types as ir\n-import 
ibis.util as util\n+from ibis import util\n from ibis.backends.impala import ddl\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import rules\n \n pytest.importorskip(\"impala\")\n \ndiff --git a/ibis/backends/impala/udf.py b/ibis/backends/impala/udf.py\nindex c6f2ef6..8b8b552 100644\n--- a/ibis/backends/impala/udf.py\n+++ b/ibis/backends/impala/udf.py\n@@ -21,7 +21,7 @@ import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.udf.validate as v\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql.registry import fixed_arity, sql_type_names\n from ibis.backends.impala.compiler import ImpalaExprTranslator\n \ndiff --git a/ibis/backends/mysql/__init__.py b/ibis/backends/mysql/__init__.py\nindex c0ddacb..50b331a 100644\n--- a/ibis/backends/mysql/__init__.py\n+++ b/ibis/backends/mysql/__init__.py\n@@ -8,7 +8,7 @@ import warnings\n from typing import Literal\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\ndiff --git a/ibis/backends/mysql/compiler.py b/ibis/backends/mysql/compiler.py\nindex 13819cb..7456f71 100644\n--- a/ibis/backends/mysql/compiler.py\n+++ b/ibis/backends/mysql/compiler.py\n@@ -1,7 +1,7 @@\n from __future__ import annotations\n \n import sqlalchemy as sa\n-import sqlalchemy.dialects.mysql as mysql\n+from sqlalchemy.dialects import mysql\n \n import ibis.expr.datatypes as dt\n from ibis.backends.base.sql.alchemy import AlchemyCompiler, AlchemyExprTranslator\ndiff --git a/ibis/backends/postgres/tests/test_functions.py b/ibis/backends/postgres/tests/test_functions.py\nindex 33c6d2e..0f377e3 100644\n--- a/ibis/backends/postgres/tests/test_functions.py\n+++ b/ibis/backends/postgres/tests/test_functions.py\n@@ -11,9 +11,9 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.config as config\n import ibis.expr.datatypes as dt\n import ibis.expr.types as ir\n+from ibis import config\n from ibis import literal as L\n from ibis.expr.window import rows_with_max_lookback\n \ndiff --git a/ibis/backends/pyspark/__init__.py b/ibis/backends/pyspark/__init__.py\nindex 1b42080..b994911 100644\n--- a/ibis/backends/pyspark/__init__.py\n+++ b/ibis/backends/pyspark/__init__.py\n@@ -14,8 +14,7 @@ import ibis.config\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.expr.types as types\n-import ibis.util as util\n+from ibis import util\n from ibis.backends.base.sql import BaseSQLBackend\n from ibis.backends.base.sql.compiler import Compiler, TableSetFormatter\n from ibis.backends.base.sql.ddl import (\n@@ -217,16 +216,16 @@ class Backend(BaseSQLBackend):\n **kwargs: Any,\n ) -> Any:\n \"\"\"Execute an expression.\"\"\"\n- if isinstance(expr, types.Table):\n+ if isinstance(expr, ir.Table):\n return self.compile(expr, timecontext, params, **kwargs).toPandas()\n- elif isinstance(expr, types.Column):\n+ elif isinstance(expr, ir.Column):\n # expression must be named for the projection\n if not expr.has_name():\n expr = expr.name(\"tmp\")\n return self.compile(\n expr.to_projection(), timecontext, params, **kwargs\n ).toPandas()[expr.get_name()]\n- elif isinstance(expr, types.Scalar):\n+ elif isinstance(expr, ir.Scalar):\n compiled = self.compile(expr, timecontext, params, **kwargs)\n if isinstance(compiled, Column):\n # attach result column to a fake DataFrame and\ndiff --git 
a/ibis/backends/pyspark/tests/test_ddl.py b/ibis/backends/pyspark/tests/test_ddl.py\nindex 0288062..ccc8a97 100644\n--- a/ibis/backends/pyspark/tests/test_ddl.py\n+++ b/ibis/backends/pyspark/tests/test_ddl.py\n@@ -5,7 +5,7 @@ import pytest\n \n import ibis\n import ibis.common.exceptions as com\n-import ibis.util as util\n+from ibis import util\n from ibis.tests.util import assert_equal\n \n pyspark = pytest.importorskip(\"pyspark\")\ndiff --git a/ibis/backends/sqlite/tests/test_client.py b/ibis/backends/sqlite/tests/test_client.py\nindex 95aa24d..ad64700 100644\n--- a/ibis/backends/sqlite/tests/test_client.py\n+++ b/ibis/backends/sqlite/tests/test_client.py\n@@ -5,8 +5,8 @@ import pandas.testing as tm\n import pytest\n \n import ibis\n-import ibis.config as config\n import ibis.expr.types as ir\n+from ibis import config\n \n pytest.importorskip(\"sqlalchemy\")\n \ndiff --git a/ibis/expr/format.py b/ibis/expr/format.py\nindex e3d48cd..85fab3f 100644\n--- a/ibis/expr/format.py\n+++ b/ibis/expr/format.py\n@@ -9,13 +9,13 @@ from typing import Any, Callable, Deque, Iterable, Mapping, Tuple\n import rich.pretty\n \n import ibis\n-import ibis.common.graph as graph\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n import ibis.expr.window as win\n-import ibis.util as util\n+from ibis import util\n+from ibis.common import graph\n \n Aliases = Mapping[ops.TableNode, int]\n Deps = Deque[Tuple[int, ops.TableNode]]\ndiff --git a/ibis/expr/operations/relations.py b/ibis/expr/operations/relations.py\nindex 080ddcd..de44a15 100644\n--- a/ibis/expr/operations/relations.py\n+++ b/ibis/expr/operations/relations.py\n@@ -11,7 +11,7 @@ import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute\n from ibis.expr.deferred import Deferred\n from ibis.expr.operations.core import Named, Node, Value\ndiff --git a/ibis/expr/rules.py b/ibis/expr/rules.py\nindex 9b1a3b7..d40700e 100644\n--- a/ibis/expr/rules.py\n+++ b/ibis/expr/rules.py\n@@ -11,7 +11,7 @@ import ibis.common.exceptions as com\n import ibis.expr.datatypes as dt\n import ibis.expr.schema as sch\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.annotations import attribute, optional\n from ibis.common.validators import (\n bool_,\ndiff --git a/ibis/expr/timecontext.py b/ibis/expr/timecontext.py\nindex 7ecd8e7..9620d6c 100644\n--- a/ibis/expr/timecontext.py\n+++ b/ibis/expr/timecontext.py\n@@ -38,8 +38,8 @@ from typing import TYPE_CHECKING, Any\n import numpy as np\n \n import ibis.common.exceptions as com\n-import ibis.config as config\n import ibis.expr.operations as ops\n+from ibis import config\n \n if TYPE_CHECKING:\n import pandas as pd\ndiff --git a/ibis/expr/types/groupby.py b/ibis/expr/types/groupby.py\nindex 138f92e..97aaaa2 100644\n--- a/ibis/expr/types/groupby.py\n+++ b/ibis/expr/types/groupby.py\n@@ -22,7 +22,7 @@ from typing import Iterable, Sequence\n import ibis.expr.analysis as an\n import ibis.expr.types as ir\n import ibis.expr.window as _window\n-import ibis.util as util\n+from ibis import util\n from ibis.expr.deferred import Deferred\n \n _function_types = tuple(\ndiff --git a/ibis/expr/window.py b/ibis/expr/window.py\nindex 5ef3bb1..3e0efdc 100644\n--- a/ibis/expr/window.py\n+++ b/ibis/expr/window.py\n@@ -11,7 +11,7 @@ import 
toolz\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n-import ibis.util as util\n+from ibis import util\n from ibis.common.exceptions import IbisInputError\n from ibis.common.grounds import Comparable\n \ndiff --git a/ibis/tests/expr/test_decimal.py b/ibis/tests/expr/test_decimal.py\nindex 85d8eb2..12b809b 100644\n--- a/ibis/tests/expr/test_decimal.py\n+++ b/ibis/tests/expr/test_decimal.py\n@@ -3,10 +3,10 @@ import operator\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_type_metadata(lineitem):\ndiff --git a/ibis/tests/expr/test_interactive.py b/ibis/tests/expr/test_interactive.py\nindex cea1945..0c5613b 100644\n--- a/ibis/tests/expr/test_interactive.py\n+++ b/ibis/tests/expr/test_interactive.py\n@@ -14,7 +14,7 @@\n \n import pytest\n \n-import ibis.config as config\n+from ibis import config\n from ibis.tests.expr.mocks import MockBackend\n \n \ndiff --git a/ibis/tests/expr/test_table.py b/ibis/tests/expr/test_table.py\nindex 04f4a7d..3f77985 100644\n--- a/ibis/tests/expr/test_table.py\n+++ b/ibis/tests/expr/test_table.py\n@@ -10,13 +10,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as an\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n from ibis import _\n from ibis import literal as L\n from ibis.common.exceptions import RelationError\n+from ibis.expr import api\n from ibis.expr.types import Column, Table\n from ibis.tests.expr.mocks import MockAlchemyBackend, MockBackend\n from ibis.tests.util import assert_equal, assert_pickle_roundtrip\ndiff --git a/ibis/tests/expr/test_temporal.py b/ibis/tests/expr/test_temporal.py\nindex e76e71c..9a0f43f 100644\n--- a/ibis/tests/expr/test_temporal.py\n+++ b/ibis/tests/expr/test_temporal.py\n@@ -5,10 +5,10 @@ import pytest\n from pytest import param\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_temporal_literals():\ndiff --git a/ibis/tests/expr/test_timestamp.py b/ibis/tests/expr/test_timestamp.py\nindex 6601c8b..7782787 100644\n--- a/ibis/tests/expr/test_timestamp.py\n+++ b/ibis/tests/expr/test_timestamp.py\n@@ -5,11 +5,11 @@ import pandas as pd\n import pytest\n \n import ibis\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n+from ibis.expr import api\n \n \n def test_field_select(alltypes):\ndiff --git a/ibis/tests/expr/test_value_exprs.py b/ibis/tests/expr/test_value_exprs.py\nindex 4c3d475..9eb247c 100644\n--- a/ibis/tests/expr/test_value_exprs.py\n+++ b/ibis/tests/expr/test_value_exprs.py\n@@ -15,13 +15,13 @@ from pytest import param\n import ibis\n import ibis.common.exceptions as com\n import ibis.expr.analysis as L\n-import ibis.expr.api as api\n import ibis.expr.datatypes as dt\n import ibis.expr.operations as ops\n import ibis.expr.rules as rlz\n import ibis.expr.types as ir\n from ibis import _, literal\n from ibis.common.exceptions import IbisTypeError\n+from ibis.expr import api\n from ibis.tests.util import assert_equal\n \n \ndiff --git a/ibis/tests/expr/test_visualize.py 
b/ibis/tests/expr/test_visualize.py\nindex 5525944..253564f 100644\n--- a/ibis/tests/expr/test_visualize.py\n+++ b/ibis/tests/expr/test_visualize.py\n@@ -9,8 +9,8 @@ import ibis.expr.types as ir\n \n pytest.importorskip('graphviz')\n \n-import ibis.expr.api as api # noqa: E402\n import ibis.expr.visualize as viz # noqa: E402\n+from ibis.expr import api # noqa: E402\n \n pytestmark = pytest.mark.skipif(\n int(os.environ.get('CONDA_BUILD', 0)) == 1, reason='CONDA_BUILD defined'\ndiff --git a/ibis/tests/sql/test_sqlalchemy.py b/ibis/tests/sql/test_sqlalchemy.py\nindex 2ad5453..3aa8c3d 100644\n--- a/ibis/tests/sql/test_sqlalchemy.py\n+++ b/ibis/tests/sql/test_sqlalchemy.py\n@@ -15,8 +15,8 @@\n import operator\n \n import pytest\n-import sqlalchemy.sql as sql\n from sqlalchemy import func as F\n+from sqlalchemy import sql\n from sqlalchemy import types as sat\n \n import ibis\ndiff --git a/ibis/tests/util.py b/ibis/tests/util.py\nindex f79d09a..025bfc7 100644\n--- a/ibis/tests/util.py\n+++ b/ibis/tests/util.py\n@@ -5,7 +5,7 @@ from __future__ import annotations\n import pickle\n \n import ibis\n-import ibis.util as util\n+from ibis import util\n \n \n def assert_equal(left, right):\ndiff --git a/pyproject.toml b/pyproject.toml\nindex f2146d4..492ad9e 100644\n--- a/pyproject.toml\n+++ b/pyproject.toml\n@@ -310,6 +310,7 @@ select = [\n \"PGH\", # pygrep-hooks\n \"PLC\", # pylint\n \"PLE\", # pylint\n+ \"PLR\", # pylint import style\n \"PLW\", # pylint\n \"RET\", # flake8-return\n \"RUF\", # ruff-specific rules\n"]
| 1 |
["8d53d724275ebe4b2a0bb0bd7e2c2dfc399e049b"]
|
["refactor"]
|
build improvements
|
["diff --git a/.travis.yml b/.travis.yml\nindex 9e1b926..3144244 100644\n--- a/.travis.yml\n+++ b/.travis.yml\n@@ -1,5 +1,6 @@\n language: node_js\n dist: trusty\n+sudo: required\n node_js:\n - '6.9.5'\n before_install:\ndiff --git a/e2e/schematics/command-line.test.ts b/e2e/schematics/command-line.test.ts\nindex 16d8b34..ea91494 100644\n--- a/e2e/schematics/command-line.test.ts\n+++ b/e2e/schematics/command-line.test.ts\n@@ -68,8 +68,6 @@ describe('Command line', () => {\n \n updateFile('apps/myapp/src/app/app.component.spec.ts', `import '@nrwl/mylib';`);\n \n- updateRunAffectedToWorkInE2ESetup();\n-\n const affectedApps = runCommand('npm run affected:apps -- --files=\"libs/mylib/index.ts\"');\n expect(affectedApps).toContain('myapp');\n expect(affectedApps).not.toContain('myapp2');\n@@ -147,11 +145,3 @@ describe('Command line', () => {\n 1000000\n );\n });\n-\n-function updateRunAffectedToWorkInE2ESetup() {\n- const runAffected = readFile('node_modules/@nrwl/schematics/src/command-line/affected.js');\n- const newRunAffected = runAffected\n- .replace('ng build', '../../node_modules/.bin/ng build')\n- .replace('ng e2e', '../../node_modules/.bin/ng e2e');\n- updateFile('node_modules/@nrwl/schematics/src/command-line/affected.js', newRunAffected);\n-}\ndiff --git a/e2e/schematics/workspace.test.ts b/e2e/schematics/workspace.test.ts\nindex 8a41070..8749926 100644\n--- a/e2e/schematics/workspace.test.ts\n+++ b/e2e/schematics/workspace.test.ts\n@@ -82,7 +82,7 @@ describe('Nrwl Convert to Nx Workspace', () => {\n \n it('should generate a workspace and not change dependencies or devDependencies if they already exist', () => {\n // create a new AngularCLI app\n- runNgNew('--skip-install');\n+ runNgNew();\n const nxVersion = '0.0.0';\n const schematicsVersion = '0.0.0';\n const ngrxVersion = '0.0.0';\ndiff --git a/e2e/utils.ts b/e2e/utils.ts\nindex 422d866..a03104f 100644\n--- a/e2e/utils.ts\n+++ b/e2e/utils.ts\n@@ -17,8 +17,7 @@ export function newProject(): void {\n copyMissingPackages();\n execSync('mv ./tmp/proj ./tmp/proj_backup');\n }\n- execSync('cp -r ./tmp/proj_backup ./tmp/proj');\n- setUpSynLink();\n+ execSync('cp -a ./tmp/proj_backup ./tmp/proj');\n }\n \n export function copyMissingPackages(): void {\n@@ -26,14 +25,9 @@ export function copyMissingPackages(): void {\n modulesToCopy.forEach(m => copyNodeModule(projectName, m));\n }\n \n-export function setUpSynLink(): void {\n- execSync(`ln -s ../@nrwl/schematics/src/command-line/nx.js tmp/${projectName}/node_modules/.bin/nx`);\n- execSync(`chmod +x tmp/${projectName}/node_modules/.bin/nx`);\n-}\n-\n function copyNodeModule(path: string, name: string) {\n execSync(`rm -rf tmp/${path}/node_modules/${name}`);\n- execSync(`cp -r node_modules/${name} tmp/${path}/node_modules/${name}`);\n+ execSync(`cp -a node_modules/${name} tmp/${path}/node_modules/${name}`);\n }\n \n export function runCLI(\n@@ -43,7 +37,7 @@ export function runCLI(\n }\n ): string {\n try {\n- return execSync(`../../node_modules/.bin/ng ${command}`, {\n+ return execSync(`./node_modules/.bin/ng ${command}`, {\n cwd: `./tmp/${projectName}`\n })\n .toString()\n@@ -67,7 +61,7 @@ export function newLib(name: string): string {\n }\n \n export function runSchematic(command: string): string {\n- return execSync(`../../node_modules/.bin/schematics ${command}`, {\n+ return execSync(`./node_modules/.bin/schematics ${command}`, {\n cwd: `./tmp/${projectName}`\n }).toString();\n }\ndiff --git a/package.json b/package.json\nindex bef54f8..9186a58 100644\n--- a/package.json\n+++ 
b/package.json\n@@ -6,7 +6,7 @@\n \"private\": true,\n \"scripts\": {\n \"build\": \"./scripts/build.sh\",\n- \"e2e\": \"yarn build && ./scripts/e2e.sh\",\n+ \"e2e\": \"./scripts/e2e.sh\",\n \"format\": \"./scripts/format.sh\",\n \"linknpm\": \"./scripts/link.sh\",\n \"package\": \"./scripts/package.sh\",\n@@ -14,7 +14,7 @@\n \"copy\": \"./scripts/copy.sh\",\n \"test:schematics\": \"yarn build && ./scripts/test_schematics.sh\",\n \"test:nx\": \"yarn build && ./scripts/test_nx.sh\",\n- \"test\": \"yarn build && ./scripts/test_nx.sh && ./scripts/test_schematics.sh\",\n+ \"test\": \"yarn linknpm && ./scripts/test_nx.sh && ./scripts/test_schematics.sh\",\n \"checkformat\": \"./scripts/check-format.sh\",\n \"publish_npm\": \"./scripts/publish.sh\"\n },\ndiff --git a/packages/schematics/src/collection/workspace/index.ts b/packages/schematics/src/collection/workspace/index.ts\nindex 8f8897f..c70d161 100644\n--- a/packages/schematics/src/collection/workspace/index.ts\n+++ b/packages/schematics/src/collection/workspace/index.ts\n@@ -254,20 +254,7 @@ function moveFiles(options: Schema) {\n \n function copyAngularCliTgz() {\n return (host: Tree) => {\n- copyFile(\n- path.join(\n- 'node_modules',\n- '@nrwl',\n- 'schematics',\n- 'src',\n- 'collection',\n- 'application',\n- 'files',\n- '__directory__',\n- '.angular_cli.tgz'\n- ),\n- '.'\n- );\n+ copyFile(path.join(__dirname, '..', 'application', 'files', '__directory__', '.angular_cli.tgz'), '.');\n return host;\n };\n }\ndiff --git a/packages/schematics/src/command-line/affected.ts b/packages/schematics/src/command-line/affected.ts\nindex b7f9173..89a4f72 100644\n--- a/packages/schematics/src/command-line/affected.ts\n+++ b/packages/schematics/src/command-line/affected.ts\n@@ -1,5 +1,7 @@\n import { execSync } from 'child_process';\n import { getAffectedApps, parseFiles } from './shared';\n+import * as path from 'path';\n+import * as resolve from 'resolve';\n \n export function affected(args: string[]): void {\n const command = args[0];\n@@ -39,7 +41,7 @@ function build(apps: string[], rest: string[]) {\n if (apps.length > 0) {\n console.log(`Building ${apps.join(', ')}`);\n apps.forEach(app => {\n- execSync(`ng build ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n+ execSync(`${ngPath()} build ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n });\n } else {\n console.log('No apps to build');\n@@ -50,9 +52,13 @@ function e2e(apps: string[], rest: string[]) {\n if (apps.length > 0) {\n console.log(`Testing ${apps.join(', ')}`);\n apps.forEach(app => {\n- execSync(`ng e2e ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n+ execSync(`${ngPath()} e2e ${rest.join(' ')} -a=${app}`, { stdio: [0, 1, 2] });\n });\n } else {\n- console.log('No apps to tst');\n+ console.log('No apps to test');\n }\n }\n+\n+function ngPath() {\n+ return `${path.dirname(path.dirname(path.dirname(resolve.sync('@angular/cli', { basedir: __dirname }))))}/bin/ng`;\n+}\ndiff --git a/scripts/build.sh b/scripts/build.sh\nindex ac533b5..9b8891b 100755\n--- a/scripts/build.sh\n+++ b/scripts/build.sh\n@@ -3,6 +3,8 @@\n rm -rf build\n ngc\n rsync -a --exclude=*.ts packages/ build/packages\n+chmod +x build/packages/schematics/bin/create-nx-workspace.js\n+chmod +x build/packages/schematics/src/command-line/nx.js\n rm -rf build/packages/install\n cp README.md build/packages/schematics\n cp README.md build/packages/nx\n\\ No newline at end of file\n"]
| 1 |
["e0a977b2d316e7612b5d72cb02cd7d78e75dbc55"]
|
["build"]
|
fix deploy
|
["diff --git a/.github/workflows/deploy.yaml b/.github/workflows/deploy.yaml\nindex 3830f4c..3b14ee5 100644\n--- a/.github/workflows/deploy.yaml\n+++ b/.github/workflows/deploy.yaml\n@@ -67,7 +67,7 @@ jobs:\n run: aws s3 cp .next/static s3://cdn.rs.school/_next/static/ --recursive --cache-control \"public,max-age=15552000,immutable\"\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-client:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -117,7 +117,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-server:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n@@ -167,7 +167,7 @@ jobs:\n run: npm run build\n \n - name: Build container\n- run: docker buildx build --platform linux/amd64,linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n+ run: docker buildx build --platform linux/arm64 -t ghcr.io/rolling-scopes/rsschool-app-nestjs:master .\n \n - name: Login to GitHub Container Registry\n uses: docker/login-action@v1\n"]
| 1 |
["7785be09053049b30cf41b420c59f051cd0129fc"]
|
["cicd"]
|
remove unnecessary spotless definition
It receives this already from the parent pom.
|
["diff --git a/benchmarks/project/pom.xml b/benchmarks/project/pom.xml\nindex 62030b6..ab87dea 100644\n--- a/benchmarks/project/pom.xml\n+++ b/benchmarks/project/pom.xml\n@@ -123,11 +123,6 @@\n </plugin>\n \n <plugin>\n- <groupId>com.diffplug.spotless</groupId>\n- <artifactId>spotless-maven-plugin</artifactId>\n- </plugin>\n-\n- <plugin>\n <groupId>org.apache.maven.plugins</groupId>\n <artifactId>maven-shade-plugin</artifactId>\n <executions>\n"]
| 1 |
["7f9721dc9bbf66a3712d59352f64ca089da139f0"]
|
["build"]
|
change tests to depend on BrokerContext
|
["diff --git a/broker/src/main/java/io/camunda/zeebe/broker/Broker.java b/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\nindex fe4e42d..37c7066 100644\n--- a/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\n+++ b/broker/src/main/java/io/camunda/zeebe/broker/Broker.java\n@@ -7,20 +7,14 @@\n */\n package io.camunda.zeebe.broker;\n \n-import io.atomix.cluster.AtomixCluster;\n import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.bootstrap.BrokerStartupContextImpl;\n import io.camunda.zeebe.broker.bootstrap.BrokerStartupProcess;\n-import io.camunda.zeebe.broker.clustering.ClusterServices;\n import io.camunda.zeebe.broker.exporter.repo.ExporterLoadException;\n import io.camunda.zeebe.broker.exporter.repo.ExporterRepository;\n-import io.camunda.zeebe.broker.partitioning.PartitionManager;\n-import io.camunda.zeebe.broker.system.EmbeddedGatewayService;\n import io.camunda.zeebe.broker.system.SystemContext;\n import io.camunda.zeebe.broker.system.configuration.BrokerCfg;\n-import io.camunda.zeebe.broker.system.management.BrokerAdminService;\n import io.camunda.zeebe.broker.system.monitoring.BrokerHealthCheckService;\n-import io.camunda.zeebe.broker.system.monitoring.DiskSpaceUsageMonitor;\n import io.camunda.zeebe.protocol.impl.encoding.BrokerInfo;\n import io.camunda.zeebe.util.LogUtil;\n import io.camunda.zeebe.util.VersionUtil;\n@@ -184,35 +178,15 @@ public final class Broker implements AutoCloseable {\n }\n \n // only used for tests\n- public EmbeddedGatewayService getEmbeddedGatewayService() {\n- return brokerContext.getEmbeddedGatewayService();\n- }\n-\n- public AtomixCluster getAtomixCluster() {\n- return brokerContext.getAtomixCluster();\n- }\n-\n- public ClusterServices getClusterServices() {\n- return brokerContext.getClusterServices();\n- }\n-\n- public DiskSpaceUsageMonitor getDiskSpaceUsageMonitor() {\n- return brokerContext.getDiskSpaceUsageMonitor();\n- }\n-\n- public BrokerAdminService getBrokerAdminService() {\n- return brokerContext.getBrokerAdminService();\n+ public BrokerContext getBrokerContext() {\n+ return brokerContext;\n }\n \n+ // only used for tests\n public SystemContext getSystemContext() {\n return systemContext;\n }\n \n- public PartitionManager getPartitionManager() {\n- return brokerContext.getPartitionManager();\n- }\n- // only used for tests\n-\n /**\n * Temporary helper object. This object is needed during the transition of broker startup/shutdown\n * steps to the new concept. 
Afterwards, the expectation is that this object will merge with the\ndiff --git a/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java b/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\nindex bda5170..1accbc1 100644\n--- a/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/system/partitions/BrokerSnapshotTest.java\n@@ -45,11 +45,12 @@ public class BrokerSnapshotTest {\n (RaftPartition)\n brokerRule\n .getBroker()\n+ .getBrokerContext()\n .getPartitionManager()\n .getPartitionGroup()\n .getPartition(PartitionId.from(PartitionManagerImpl.GROUP_NAME, PARTITION_ID));\n journalReader = raftPartition.getServer().openReader();\n- brokerAdminService = brokerRule.getBroker().getBrokerAdminService();\n+ brokerAdminService = brokerRule.getBroker().getBrokerContext().getBrokerAdminService();\n \n final String contactPoint = NetUtil.toSocketAddressString(brokerRule.getGatewayAddress());\n final ZeebeClientBuilder zeebeClientBuilder =\ndiff --git a/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java b/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\nindex e98e7d2..a831bfe 100644\n--- a/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\n+++ b/broker/src/test/java/io/camunda/zeebe/broker/test/EmbeddedBrokerRule.java\n@@ -173,11 +173,11 @@ public final class EmbeddedBrokerRule extends ExternalResource {\n }\n \n public ClusterServices getClusterServices() {\n- return broker.getClusterServices();\n+ return broker.getBrokerContext().getClusterServices();\n }\n \n public AtomixCluster getAtomixCluster() {\n- return broker.getAtomixCluster();\n+ return broker.getBrokerContext().getAtomixCluster();\n }\n \n public InetSocketAddress getGatewayAddress() {\n@@ -245,7 +245,8 @@ public final class EmbeddedBrokerRule extends ExternalResource {\n Thread.currentThread().interrupt();\n }\n \n- final EmbeddedGatewayService embeddedGatewayService = broker.getEmbeddedGatewayService();\n+ final EmbeddedGatewayService embeddedGatewayService =\n+ broker.getBrokerContext().getEmbeddedGatewayService();\n if (embeddedGatewayService != null) {\n final BrokerClient brokerClient = embeddedGatewayService.get().getBrokerClient();\n \ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\nindex 890b596..8561cf1 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ClusteringRule.java\n@@ -29,6 +29,7 @@ import io.atomix.utils.net.Address;\n import io.camunda.zeebe.broker.Broker;\n import io.camunda.zeebe.broker.PartitionListener;\n import io.camunda.zeebe.broker.SpringBrokerBridge;\n+import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.exporter.stream.ExporterDirectorContext;\n import io.camunda.zeebe.broker.partitioning.PartitionManagerImpl;\n import io.camunda.zeebe.broker.system.SystemContext;\n@@ -602,11 +603,11 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void stepDown(final Broker broker, final int partitionId) {\n- final var atomix = broker.getClusterServices();\n+ final var atomix = broker.getBrokerContext().getClusterServices();\n final MemberId nodeId = 
atomix.getMembershipService().getLocalMember().id();\n \n final var raftPartition =\n- broker.getPartitionManager().getPartitionGroup().getPartitions().stream()\n+ broker.getBrokerContext().getPartitionManager().getPartitionGroup().getPartitions().stream()\n .filter(partition -> partition.members().contains(nodeId))\n .filter(partition -> partition.id().id() == partitionId)\n .map(RaftPartition.class::cast)\n@@ -617,14 +618,14 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void disconnect(final Broker broker) {\n- final var atomix = broker.getAtomixCluster();\n+ final var atomix = broker.getBrokerContext().getAtomixCluster();\n \n ((NettyUnicastService) atomix.getUnicastService()).stop().join();\n ((NettyMessagingService) atomix.getMessagingService()).stop().join();\n }\n \n public void connect(final Broker broker) {\n- final var atomix = broker.getAtomixCluster();\n+ final var atomix = broker.getBrokerContext().getAtomixCluster();\n \n ((NettyUnicastService) atomix.getUnicastService()).start().join();\n ((NettyMessagingService) atomix.getMessagingService()).start().join();\n@@ -666,11 +667,11 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n final var broker = brokers.get(expectedLeader);\n- final var atomix = broker.getClusterServices();\n+ final var atomix = broker.getBrokerContext().getClusterServices();\n final MemberId nodeId = atomix.getMembershipService().getLocalMember().id();\n \n final var raftPartition =\n- broker.getPartitionManager().getPartitionGroup().getPartitions().stream()\n+ broker.getBrokerContext().getPartitionManager().getPartitionGroup().getPartitions().stream()\n .filter(partition -> partition.members().contains(nodeId))\n .filter(partition -> partition.id().id() == START_PARTITION_ID)\n .map(RaftPartition.class::cast)\n@@ -775,14 +776,15 @@ public final class ClusteringRule extends ExternalResource {\n }\n \n public void takeSnapshot(final Broker broker) {\n- broker.getBrokerAdminService().takeSnapshot();\n+ broker.getBrokerContext().getBrokerAdminService().takeSnapshot();\n }\n \n public void triggerAndWaitForSnapshots() {\n // Ensure that the exporter positions are distributed to the followers\n getClock().addTime(ExporterDirectorContext.DEFAULT_DISTRIBUTION_INTERVAL);\n getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(BrokerAdminService::takeSnapshot);\n \n getBrokers()\n@@ -794,7 +796,7 @@ public final class ClusteringRule extends ExternalResource {\n .until(\n () -> {\n // Trigger snapshot again in case snapshot is not already taken\n- broker.getBrokerAdminService().takeSnapshot();\n+ broker.getBrokerContext().getBrokerAdminService().takeSnapshot();\n return getSnapshot(broker);\n },\n Optional::isPresent));\n@@ -831,7 +833,7 @@ public final class ClusteringRule extends ExternalResource {\n \n private Optional<SnapshotId> getSnapshot(final Broker broker, final int partitionId) {\n \n- final var partitions = broker.getBrokerAdminService().getPartitionStatus();\n+ final var partitions = broker.getBrokerContext().getBrokerAdminService().getPartitionStatus();\n final var partitionStatus = partitions.get(partitionId);\n \n return Optional.ofNullable(partitionStatus)\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\nindex f07961c..d46636b 
100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceMonitoringFailOverTest.java\n@@ -88,6 +88,7 @@ public class DiskSpaceMonitoringFailOverTest {\n () ->\n clusteringRule\n .getBroker(newLeaderId)\n+ .getBrokerContext()\n .getBrokerAdminService()\n .getPartitionStatus()\n .get(1)\n@@ -96,7 +97,7 @@ public class DiskSpaceMonitoringFailOverTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\nindex 0a02a27..6e93cf9 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryClusteredTest.java\n@@ -165,7 +165,7 @@ public class DiskSpaceRecoveryClusteredTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n@@ -188,7 +188,7 @@ public class DiskSpaceRecoveryClusteredTest {\n }\n \n private void waitUntilDiskSpaceAvailable(final Broker broker) throws InterruptedException {\n- final var diskSpaceMonitor = broker.getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor = broker.getBrokerContext().getDiskSpaceUsageMonitor();\n final CountDownLatch diskSpaceAvailableAgain = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n new DiskSpaceUsageListener() {\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\nindex 9cef5a0..a487729 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/DiskSpaceRecoveryTest.java\n@@ -192,7 +192,8 @@ public class DiskSpaceRecoveryTest {\n }\n \n private void waitUntilDiskSpaceNotAvailable() throws InterruptedException {\n- final var diskSpaceMonitor = embeddedBrokerRule.getBroker().getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor =\n+ embeddedBrokerRule.getBroker().getBrokerContext().getDiskSpaceUsageMonitor();\n \n final CountDownLatch diskSpaceNotAvailable = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n@@ -215,7 +216,8 @@ public class DiskSpaceRecoveryTest {\n }\n \n private void waitUntilDiskSpaceAvailable() throws InterruptedException {\n- final var diskSpaceMonitor = embeddedBrokerRule.getBroker().getDiskSpaceUsageMonitor();\n+ final var diskSpaceMonitor =\n+ embeddedBrokerRule.getBroker().getBrokerContext().getDiskSpaceUsageMonitor();\n final CountDownLatch diskSpaceAvailableAgain = new CountDownLatch(1);\n diskSpaceMonitor.addDiskUsageListener(\n 
new DiskSpaceUsageListener() {\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\nindex 2d1e4f0..58f6f16 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/health/HealthMonitoringTest.java\n@@ -48,6 +48,7 @@ public class HealthMonitoringTest {\n final var raftPartition =\n (RaftPartition)\n leader\n+ .getBrokerContext()\n .getPartitionManager()\n .getPartitionGroup()\n .getPartition(\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\nindex 468f83c..7ff03be 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceClusterTest.java\n@@ -11,6 +11,7 @@ import static org.assertj.core.api.Assertions.assertThat;\n \n import io.atomix.raft.RaftServer.Role;\n import io.camunda.zeebe.broker.Broker;\n+import io.camunda.zeebe.broker.bootstrap.BrokerContext;\n import io.camunda.zeebe.broker.system.management.BrokerAdminService;\n import io.camunda.zeebe.engine.processing.streamprocessor.StreamProcessor.Phase;\n import io.camunda.zeebe.it.clustering.ClusteringRule;\n@@ -48,7 +49,7 @@ public class BrokerAdminServiceClusterTest {\n @Before\n public void before() {\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n }\n \n @Test\n@@ -61,7 +62,8 @@ public class BrokerAdminServiceClusterTest {\n // when\n final var followerStatus =\n followers.stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .map(BrokerAdminService::getPartitionStatus)\n .map(status -> status.get(1));\n \n@@ -94,7 +96,8 @@ public class BrokerAdminServiceClusterTest {\n \n // then\n clusteringRule.getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(this::assertThatStatusContainsProcessedPositionInSnapshot);\n }\n \n@@ -102,7 +105,8 @@ public class BrokerAdminServiceClusterTest {\n public void shouldPauseAfterLeaderChange() {\n // given\n clusteringRule.getBrokers().stream()\n- .map(Broker::getBrokerAdminService)\n+ .map(Broker::getBrokerContext)\n+ .map(BrokerContext::getBrokerAdminService)\n .forEach(BrokerAdminService::pauseStreamProcessing);\n \n // when\n@@ -113,6 +117,7 @@ public class BrokerAdminServiceClusterTest {\n final var newLeaderAdminService =\n clusteringRule\n .getBroker(clusteringRule.getLeaderForPartition(1).getNodeId())\n+ .getBrokerContext()\n .getBrokerAdminService();\n assertStreamProcessorPhase(newLeaderAdminService, Phase.PAUSED);\n }\ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\nindex 5160b50..2185329 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\n+++ 
b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceTest.java\n@@ -41,7 +41,7 @@ public class BrokerAdminServiceTest {\n @Before\n public void before() {\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n }\n \n @Test\n@@ -144,7 +144,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertStreamProcessorPhase(leaderAdminService, Phase.PAUSED);\n }\n \n@@ -161,7 +161,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertStreamProcessorPhase(leaderAdminService, Phase.PROCESSING);\n }\n \n@@ -176,7 +176,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertExporterPhase(leaderAdminService, ExporterPhase.PAUSED);\n }\n \n@@ -193,7 +193,7 @@ public class BrokerAdminServiceTest {\n \n // then\n leader = clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- leaderAdminService = leader.getBrokerAdminService();\n+ leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n assertExporterPhase(leaderAdminService, ExporterPhase.EXPORTING);\n }\n \ndiff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\nindex d6c8ab3..4582ad2 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/system/BrokerAdminServiceWithOutExporterTest.java\n@@ -30,7 +30,7 @@ public class BrokerAdminServiceWithOutExporterTest {\n // given\n final var leader =\n clusteringRule.getBroker(clusteringRule.getLeaderForPartition(1).getNodeId());\n- final var leaderAdminService = leader.getBrokerAdminService();\n+ final var leaderAdminService = leader.getBrokerContext().getBrokerAdminService();\n // when there are no exporters configured\n // then\n final var partitionStatus = leaderAdminService.getPartitionStatus().get(1);\ndiff --git a/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java b/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\nindex 36bc0bf..d332201 100644\n--- a/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\n+++ b/test/src/main/java/io/camunda/zeebe/test/EmbeddedBrokerRule.java\n@@ -240,7 +240,8 @@ public class EmbeddedBrokerRule extends ExternalResource {\n Thread.currentThread().interrupt();\n }\n \n- final EmbeddedGatewayService embeddedGatewayService = broker.getEmbeddedGatewayService();\n+ final EmbeddedGatewayService embeddedGatewayService =\n+ broker.getBrokerContext().getEmbeddedGatewayService();\n if (embeddedGatewayService != null) {\n final BrokerClient 
brokerClient = embeddedGatewayService.get().getBrokerClient();\n \n"]
| 1 |
["e52a6201093f273add4903dd5f4e55a63539386d"]
|
["refactor"]
|
Added tooltip for Data sources table buttons only on small screens
|
["diff --git a/packages/nc-gui/components/dashboard/settings/DataSources.vue b/packages/nc-gui/components/dashboard/settings/DataSources.vue\nindex 78caa98..0ed5df9 100644\n--- a/packages/nc-gui/components/dashboard/settings/DataSources.vue\n+++ b/packages/nc-gui/components/dashboard/settings/DataSources.vue\n@@ -351,59 +351,78 @@ const isEditBaseModalOpen = computed({\n \n <div class=\"ds-table-col ds-table-actions\">\n <div class=\"flex items-center gap-2\">\n- <NcButton\n- v-if=\"!sources[0].is_meta && !sources[0].is_local\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- size=\"small\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.Metadata)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('tooltip.metaSync') }}\n+ <NcTooltip v-if=\"!sources[0].is_meta && !sources[0].is_local\" overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('tooltip.metaSync') }}\n+ </template>\n+ <NcButton\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ size=\"small\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.Metadata)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('tooltip.metaSync') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.ERD)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('title.relations') }}\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('title.relations') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.ERD)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('title.relations') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.UIAcl)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('labels.uiAcl') }}\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('labels.uiAcl') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.UIAcl)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('labels.uiAcl') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(sources[0].id, DataSourcesSubTab.Audit)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"book\" 
class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('title.audit') }}\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('title.audit') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(sources[0].id, DataSourcesSubTab.Audit)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"book\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('title.audit') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n+ </NcButton>\n+ </NcTooltip>\n </div>\n </div>\n <div class=\"ds-table-col ds-table-crud\">\n@@ -450,67 +469,92 @@ const isEditBaseModalOpen = computed({\n \n <div class=\"ds-table-col ds-table-actions\">\n <div class=\"flex items-center gap-2\">\n- <NcButton\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- type=\"text\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.ERD)\"\n- >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('title.relations') }}\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('title.relations') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ type=\"text\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.ERD)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"erd\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('title.relations') }}\n+ </div>\n </div>\n- </div>\n- </NcButton>\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('labels.uiAcl') }}\n+ </template>\n+ <NcButton\n+ size=\"small\"\n+ type=\"text\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.UIAcl)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('labels.uiAcl') }}\n+ </div>\n+ </div>\n+ </NcButton>\n+ </NcTooltip>\n+ <NcTooltip overlay-class-name=\"!xl:hidden\">\n+ <template #title>\n+ {{ $t('tooltip.metaSync') }}\n+ </template>\n+ <NcButton\n+ v-if=\"!source.is_meta && !source.is_local\"\n+ size=\"small\"\n+ type=\"text\"\n+ class=\"nc-action-btn cursor-pointer outline-0\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.Metadata)\"\n+ >\n+ <div class=\"flex items-center gap-2 text-gray-600\">\n+ <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n+ <div class=\"nc-action-btn-label\">\n+ {{ $t('tooltip.metaSync') }}\n+ </div>\n+ </div>\n+ </NcButton>\n+ </NcTooltip>\n+ </div>\n+ </div>\n+ <div class=\"ds-table-col ds-table-crud justify-end gap-x-1\">\n+ <NcTooltip>\n+ <template #title>\n+ {{ $t('general.edit') }}\n+ </template>\n <NcButton\n+ v-if=\"!source.is_meta && !source.is_local\"\n size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n type=\"text\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.UIAcl)\"\n+ @click=\"baseAction(source.id, DataSourcesSubTab.Edit)\"\n >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"acl\" class=\"group-hover:text-accent\" />\n- <div 
class=\"nc-action-btn-label\">\n- {{ $t('labels.uiAcl') }}\n- </div>\n- </div>\n+ <GeneralIcon icon=\"edit\" class=\"text-gray-600 -mt-0.5\" />\n </NcButton>\n+ </NcTooltip>\n+ <NcTooltip>\n+ <template #title>\n+ {{ $t('general.delete') }}\n+ </template>\n <NcButton\n v-if=\"!source.is_meta && !source.is_local\"\n size=\"small\"\n+ class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n type=\"text\"\n- class=\"nc-action-btn cursor-pointer outline-0\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.Metadata)\"\n+ @click=\"openDeleteBase(source)\"\n >\n- <div class=\"flex items-center gap-2 text-gray-600\">\n- <GeneralIcon icon=\"sync\" class=\"group-hover:text-accent\" />\n- <div class=\"nc-action-btn-label\">\n- {{ $t('tooltip.metaSync') }}\n- </div>\n- </div>\n+ <GeneralIcon icon=\"delete\" class=\"text-red-500 -mt-0.5\" />\n </NcButton>\n- </div>\n- </div>\n- <div class=\"ds-table-col ds-table-crud justify-end gap-x-1\">\n- <NcButton\n- v-if=\"!source.is_meta && !source.is_local\"\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n- type=\"text\"\n- @click=\"baseAction(source.id, DataSourcesSubTab.Edit)\"\n- >\n- <GeneralIcon icon=\"edit\" class=\"text-gray-600 -mt-0.5\" />\n- </NcButton>\n- <NcButton\n- v-if=\"!source.is_meta && !source.is_local\"\n- size=\"small\"\n- class=\"nc-action-btn cursor-pointer outline-0 !w-8 !px-1 !rounded-lg mt-0.5\"\n- type=\"text\"\n- @click=\"openDeleteBase(source)\"\n- >\n- <GeneralIcon icon=\"delete\" class=\"text-red-500 -mt-0.5\" />\n- </NcButton>\n+ </NcTooltip>\n </div>\n </div>\n </template>\ndiff --git a/packages/nc-gui/components/nc/Tooltip.vue b/packages/nc-gui/components/nc/Tooltip.vue\nindex 0810b8b..97b159e 100644\n--- a/packages/nc-gui/components/nc/Tooltip.vue\n+++ b/packages/nc-gui/components/nc/Tooltip.vue\n@@ -12,6 +12,7 @@ interface Props {\n disabled?: boolean\n placement?: TooltipPlacement | undefined\n hideOnClick?: boolean\n+ overlayClassName?: string\n }\n \n const props = defineProps<Props>()\n@@ -36,6 +37,8 @@ const attrs = useAttrs()\n \n const isKeyPressed = ref(false)\n \n+const overlayClassName = computed(() => props.overlayClassName)\n+\n onKeyStroke(\n (e) => e.key === modifierKey.value,\n (e) => {\n@@ -100,7 +103,7 @@ const onClick = () => {\n <template>\n <a-tooltip\n v-model:visible=\"showTooltip\"\n- :overlay-class-name=\"`nc-tooltip ${showTooltip ? 'visible' : 'hidden'}`\"\n+ :overlay-class-name=\"`nc-tooltip ${showTooltip ? 'visible' : 'hidden'} ${overlayClassName}`\"\n :overlay-style=\"tooltipStyle\"\n arrow-point-at-center\n :trigger=\"[]\"\n"]
| 1 |
["a75538817c20fc4132718fd7b586bf835a5795e3"]
|
["feat"]
|
remove unnecessary start argument from `range`
|
["diff --git a/ibis/backends/dask/tests/execution/test_window.py b/ibis/backends/dask/tests/execution/test_window.py\nindex 75a7331..6bfc5e3 100644\n--- a/ibis/backends/dask/tests/execution/test_window.py\n+++ b/ibis/backends/dask/tests/execution/test_window.py\n@@ -489,7 +489,7 @@ def test_project_list_scalar(npartitions):\n expr = table.mutate(res=table.ints.quantile([0.5, 0.95]))\n result = expr.execute()\n \n- expected = pd.Series([[1.0, 1.9] for _ in range(0, 3)], name=\"res\")\n+ expected = pd.Series([[1.0, 1.9] for _ in range(3)], name=\"res\")\n tm.assert_series_equal(result.res, expected)\n \n \ndiff --git a/ibis/backends/pandas/tests/execution/test_window.py b/ibis/backends/pandas/tests/execution/test_window.py\nindex 8f292b3..effa372 100644\n--- a/ibis/backends/pandas/tests/execution/test_window.py\n+++ b/ibis/backends/pandas/tests/execution/test_window.py\n@@ -436,7 +436,7 @@ def test_project_list_scalar():\n expr = table.mutate(res=table.ints.quantile([0.5, 0.95]))\n result = expr.execute()\n \n- expected = pd.Series([[1.0, 1.9] for _ in range(0, 3)], name=\"res\")\n+ expected = pd.Series([[1.0, 1.9] for _ in range(3)], name=\"res\")\n tm.assert_series_equal(result.res, expected)\n \n \ndiff --git a/ibis/backends/pyspark/tests/test_basic.py b/ibis/backends/pyspark/tests/test_basic.py\nindex 3850919..14fe677 100644\n--- a/ibis/backends/pyspark/tests/test_basic.py\n+++ b/ibis/backends/pyspark/tests/test_basic.py\n@@ -19,7 +19,7 @@ from ibis.backends.pyspark.compiler import _can_be_replaced_by_column_name # no\n def test_basic(con):\n table = con.table(\"basic_table\")\n result = table.compile().toPandas()\n- expected = pd.DataFrame({\"id\": range(0, 10), \"str_col\": \"value\"})\n+ expected = pd.DataFrame({\"id\": range(10), \"str_col\": \"value\"})\n \n tm.assert_frame_equal(result, expected)\n \n@@ -28,9 +28,7 @@ def test_projection(con):\n table = con.table(\"basic_table\")\n result1 = table.mutate(v=table[\"id\"]).compile().toPandas()\n \n- expected1 = pd.DataFrame(\n- {\"id\": range(0, 10), \"str_col\": \"value\", \"v\": range(0, 10)}\n- )\n+ expected1 = pd.DataFrame({\"id\": range(10), \"str_col\": \"value\", \"v\": range(10)})\n \n result2 = (\n table.mutate(v=table[\"id\"])\n@@ -44,8 +42,8 @@ def test_projection(con):\n {\n \"id\": range(0, 20, 2),\n \"str_col\": \"value\",\n- \"v\": range(0, 10),\n- \"v2\": range(0, 10),\n+ \"v\": range(10),\n+ \"v2\": range(10),\n }\n )\n \n"]
| 1 |
["15f8d95754a0b6865ea475ca9e515272a07bf6ba"]
|
["refactor"]
|
i18n for Time Picker
|
["diff --git a/packages/nc-gui/components/cell/TimePicker.vue b/packages/nc-gui/components/cell/TimePicker.vue\nindex 619ab45..7f66828 100644\n--- a/packages/nc-gui/components/cell/TimePicker.vue\n+++ b/packages/nc-gui/components/cell/TimePicker.vue\n@@ -38,6 +38,8 @@ const isTimeInvalid = ref(false)\n \n const dateFormat = isMysql(column.value.base_id) ? 'YYYY-MM-DD HH:mm:ss' : 'YYYY-MM-DD HH:mm:ssZ'\n \n+const { t } = useI18n()\n+\n const localState = computed({\n get() {\n if (!modelValue) {\n@@ -89,11 +91,11 @@ watch(\n \n const placeholder = computed(() => {\n if (isEditColumn.value && (modelValue === '' || modelValue === null)) {\n- return '(Optional)'\n+ return t('labels.optional')\n } else if (modelValue === null && showNull.value) {\n- return 'NULL'\n+ return t('general.null')\n } else if (isTimeInvalid.value) {\n- return 'Invalid time'\n+ return t('msg.invalidTime')\n } else {\n return ''\n }\n"]
| 1 |
["48806e3675c7b18327e7629827454d7c29be25a9"]
|
["fix"]
|
conditionals and iterators in rsx
|
["diff --git a/packages/interpreter/src/interpreter.js b/packages/interpreter/src/interpreter.js\nindex 2f5c06f..58613ea 100644\n--- a/packages/interpreter/src/interpreter.js\n+++ b/packages/interpreter/src/interpreter.js\n@@ -172,7 +172,7 @@ export class Interpreter {\n node.style = {};\n }\n node.style[name] = value;\n- } else if (ns != null || ns != undefined) {\n+ } else if (ns != null && ns != undefined) {\n node.setAttributeNS(ns, name, value);\n } else {\n switch (name) {\n@@ -266,7 +266,7 @@ export class Interpreter {\n this.AssignId(edit.path, edit.id);\n break;\n case \"CreateElement\":\n- if (edit.namespace !== null || edit.namespace !== undefined) {\n+ if (edit.namespace !== null && edit.namespace !== undefined) {\n this.CreateElementNs(edit.name, edit.id, edit.namespace);\n } else {\n this.CreateElement(edit.name, edit.id);\ndiff --git a/packages/rsx/src/lib.rs b/packages/rsx/src/lib.rs\nindex 09c6bd6..d974a6c 100644\n--- a/packages/rsx/src/lib.rs\n+++ b/packages/rsx/src/lib.rs\n@@ -245,7 +245,11 @@ impl<'a> DynamicContext<'a> {\n quote! { ::dioxus::core::TemplateNode::Text(#text) }\n }\n \n- BodyNode::Text(_) | BodyNode::RawExpr(_) | BodyNode::Component(_) => {\n+ BodyNode::RawExpr(_)\n+ | BodyNode::Text(_)\n+ | BodyNode::ForLoop(_)\n+ | BodyNode::IfChain(_)\n+ | BodyNode::Component(_) => {\n let ct = self.dynamic_nodes.len();\n self.dynamic_nodes.push(root);\n self.node_paths.push(self.current_path.clone());\ndiff --git a/packages/rsx/src/node.rs b/packages/rsx/src/node.rs\nindex 4013c9c..7b4bd23 100644\n--- a/packages/rsx/src/node.rs\n+++ b/packages/rsx/src/node.rs\n@@ -5,7 +5,7 @@ use quote::{quote, ToTokens, TokenStreamExt};\n use syn::{\n parse::{Parse, ParseStream},\n spanned::Spanned,\n- token, Expr, LitStr, Result,\n+ token, Block, Expr, ExprIf, LitStr, Pat, Result,\n };\n \n /*\n@@ -20,6 +20,8 @@ Parse\n pub enum BodyNode {\n Element(Element),\n Component(Component),\n+ ForLoop(ForLoop),\n+ IfChain(ExprIf),\n Text(IfmtInput),\n RawExpr(Expr),\n }\n@@ -35,6 +37,8 @@ impl BodyNode {\n BodyNode::Component(component) => component.name.span(),\n BodyNode::Text(text) => text.source.span(),\n BodyNode::RawExpr(exp) => exp.span(),\n+ BodyNode::ForLoop(fl) => fl.for_token.span(),\n+ BodyNode::IfChain(f) => f.if_token.span(),\n }\n }\n }\n@@ -89,6 +93,28 @@ impl Parse for BodyNode {\n }\n }\n \n+ // Transform for loops into into_iter calls\n+ if stream.peek(Token![for]) {\n+ let _f = stream.parse::<Token![for]>()?;\n+ let pat = stream.parse::<Pat>()?;\n+ let _i = stream.parse::<Token![in]>()?;\n+ let expr = stream.parse::<Box<Expr>>()?;\n+ let body = stream.parse::<Block>()?;\n+\n+ return Ok(BodyNode::ForLoop(ForLoop {\n+ for_token: _f,\n+ pat,\n+ in_token: _i,\n+ expr,\n+ body,\n+ }));\n+ }\n+\n+ // Transform unterminated if statements into terminated optional if statements\n+ if stream.peek(Token![if]) {\n+ return Ok(BodyNode::IfChain(stream.parse()?));\n+ }\n+\n Ok(BodyNode::RawExpr(stream.parse::<Expr>()?))\n }\n }\n@@ -104,6 +130,104 @@ impl ToTokens for BodyNode {\n BodyNode::RawExpr(exp) => tokens.append_all(quote! {\n __cx.fragment_from_iter(#exp)\n }),\n+ BodyNode::ForLoop(exp) => {\n+ let ForLoop {\n+ pat, expr, body, ..\n+ } = exp;\n+\n+ tokens.append_all(quote! {\n+ __cx.fragment_from_iter(\n+ (#expr).into_iter().map(|#pat| {\n+ #body\n+ })\n+ )\n+ })\n+ }\n+ BodyNode::IfChain(chain) => {\n+ if is_if_chain_terminated(chain) {\n+ tokens.append_all(quote! 
{\n+ __cx.fragment_from_iter(#chain)\n+ });\n+ } else {\n+ let ExprIf {\n+ cond,\n+ then_branch,\n+ else_branch,\n+ ..\n+ } = chain;\n+\n+ let mut body = TokenStream2::new();\n+\n+ body.append_all(quote! {\n+ if #cond {\n+ Some(#then_branch)\n+ }\n+ });\n+\n+ let mut elif = else_branch;\n+\n+ while let Some((_, ref branch)) = elif {\n+ match branch.as_ref() {\n+ Expr::If(ref eelif) => {\n+ let ExprIf {\n+ cond,\n+ then_branch,\n+ else_branch,\n+ ..\n+ } = eelif;\n+\n+ body.append_all(quote! {\n+ else if #cond {\n+ Some(#then_branch)\n+ }\n+ });\n+\n+ elif = else_branch;\n+ }\n+ _ => {\n+ body.append_all(quote! {\n+ else {\n+ #branch\n+ }\n+ });\n+ break;\n+ }\n+ }\n+ }\n+\n+ body.append_all(quote! {\n+ else { None }\n+ });\n+\n+ tokens.append_all(quote! {\n+ __cx.fragment_from_iter(#body)\n+ });\n+ }\n+ }\n+ }\n+ }\n+}\n+\n+#[derive(PartialEq, Eq, Clone, Debug, Hash)]\n+pub struct ForLoop {\n+ pub for_token: Token![for],\n+ pub pat: Pat,\n+ pub in_token: Token![in],\n+ pub expr: Box<Expr>,\n+ pub body: Block,\n+}\n+\n+fn is_if_chain_terminated(chain: &ExprIf) -> bool {\n+ let mut current = chain;\n+ loop {\n+ if let Some((_, else_block)) = ¤t.else_branch {\n+ if let Expr::If(else_if) = else_block.as_ref() {\n+ current = else_if;\n+ } else {\n+ return true;\n+ }\n+ } else {\n+ return false;\n }\n }\n }\n"]
| 1 |
["6b473cbdc5997af47c56a2a74f5b64da6d4c2ad7"]
|
["feat"]
|
update build
|
["diff --git a/bootstrap/scripts/publish-patch.sh b/bootstrap/scripts/publish-patch.sh\nindex a1b6f12..0d849a5 100755\n--- a/bootstrap/scripts/publish-patch.sh\n+++ b/bootstrap/scripts/publish-patch.sh\n@@ -5,4 +5,4 @@ lerna version patch\n lerna publish from-package -y\n git push\n \n-./pack_and_install.sh\n\\ No newline at end of file\n+./bootstrap/scripts/pack_and_install.sh\n\\ No newline at end of file\n"]
| 1 |
["3fcfb20b0feb371b357edc42fcb7c87085c9b82a"]
|
["build"]
|
remove unused imports
|
["diff --git a/src/content/redux/modules/dictionaries.ts b/src/content/redux/modules/dictionaries.ts\nindex 88f7215..570d397 100644\n--- a/src/content/redux/modules/dictionaries.ts\n+++ b/src/content/redux/modules/dictionaries.ts\n@@ -3,7 +3,6 @@ import { DictID, appConfigFactory, AppConfig } from '@/app-config'\n import isEqual from 'lodash/isEqual'\n import { saveWord } from '@/_helpers/record-manager'\n import { getDefaultSelectionInfo, SelectionInfo, isSameSelection } from '@/_helpers/selection'\n-import { createActiveConfigStream } from '@/_helpers/config-manager'\n import { isContainChinese, isContainEnglish, testerPunct, isContainMinor, testerChinese, testJapanese, testKorean } from '@/_helpers/lang-check'\n import { MsgType, MsgFetchDictResult } from '@/typings/message'\n import { StoreState, DispatcherThunk, Dispatcher } from './index'\ndiff --git a/src/content/redux/modules/widget.ts b/src/content/redux/modules/widget.ts\nindex 53ad550..68e0a3d 100644\n--- a/src/content/redux/modules/widget.ts\n+++ b/src/content/redux/modules/widget.ts\n@@ -1,9 +1,9 @@\n import * as recordManager from '@/_helpers/record-manager'\n import { StoreState, DispatcherThunk, Dispatcher } from './index'\n-import appConfigFactory, { TCDirection, AppConfig, DictID } from '@/app-config'\n+import appConfigFactory, { TCDirection, DictID } from '@/app-config'\n import { message, storage } from '@/_helpers/browser-api'\n-import { createActiveConfigStream, createConfigIDListStream } from '@/_helpers/config-manager'\n-import { MsgSelection, MsgType, MsgTempDisabledState, MsgEditWord, MsgOpenUrl, MsgFetchDictResult } from '@/typings/message'\n+import { createConfigIDListStream } from '@/_helpers/config-manager'\n+import { MsgType, MsgTempDisabledState, MsgEditWord, MsgOpenUrl, MsgFetchDictResult } from '@/typings/message'\n import { searchText, restoreDicts } from '@/content/redux/modules/dictionaries'\n import { SelectionInfo, getDefaultSelectionInfo } from '@/_helpers/selection'\n import { Mutable } from '@/typings/helpers'\n"]
| 1 |
["a50b51999015e210918d9c8e95fd4cac347353be"]
|
["refactor"]
|
use an action for issue assignment
|
["diff --git a/.github/workflows/assign.yml b/.github/workflows/assign.yml\nindex 29d92a8..758874e 100644\n--- a/.github/workflows/assign.yml\n+++ b/.github/workflows/assign.yml\n@@ -8,8 +8,6 @@ jobs:\n runs-on: ubuntu-latest\n if: ${{ github.event.comment.body == '/take' }}\n steps:\n- - uses: actions/checkout@v2\n- - name: Assign issue ${{ github.event.issue.number }} to ${{ github.event.comment.user.login }}\n- run: gh issue edit ${{ github.event.issue.number }} --add-assignee \"${{ github.event.comment.user.login }}\"\n- env:\n- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}\n+ - uses: pozil/auto-assign-issue@v1.1.0\n+ with:\n+ assignees: ${{ github.event.comment.user.login }}\n"]
| 1 |
["fb3a231b29bc8bff9270b99dd4aff9dad599f21f"]
|
["cicd"]
|
add canonical `_name` to edge packages
|
["diff --git a/scripts/bump-edge.ts b/scripts/bump-edge.ts\nindex e92e3c9..0b7a11a 100644\n--- a/scripts/bump-edge.ts\n+++ b/scripts/bump-edge.ts\n@@ -53,6 +53,7 @@ async function loadWorkspace (dir: string) {\n }\n \n const rename = (from: string, to: string) => {\n+ find(from).data._name = find(from).data.name\n find(from).data.name = to\n for (const pkg of packages) {\n pkg.updateDeps((dep) => {\n"]
| 1 |
["573f87edf9bdc19c9c4c3a978fad6ed3ce788f5f"]
|
["build"]
|
remove duplicated variables
|
["diff --git a/packages/core/src/components/item/item.ios.scss b/packages/core/src/components/item/item.ios.scss\nindex 4de5455..6c4d11a 100644\n--- a/packages/core/src/components/item/item.ios.scss\n+++ b/packages/core/src/components/item/item.ios.scss\n@@ -47,15 +47,6 @@ $item-ios-detail-push-color: $list-ios-border-color !default;\n /// @prop - Icon for the detail arrow\n $item-ios-detail-push-svg: \"<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 20'><path d='M2,20l-2-2l8-8L0,2l2-2l10,10L2,20z' fill='#{$item-ios-detail-push-color}'/></svg>\" !default;\n \n-/// @prop - Background for the divider\n-$item-ios-divider-background: #f7f7f7 !default;\n-\n-/// @prop - Color for the divider\n-$item-ios-divider-color: #222 !default;\n-\n-/// @prop - Padding for the divider\n-$item-ios-divider-padding: 5px 15px !default;\n-\n \n // iOS Item\n // --------------------------------------------------\ndiff --git a/packages/core/src/components/item/item.md.scss b/packages/core/src/components/item/item.md.scss\nindex 1dd1800..3dadbc0 100644\n--- a/packages/core/src/components/item/item.md.scss\n+++ b/packages/core/src/components/item/item.md.scss\n@@ -35,21 +35,6 @@ $item-md-detail-push-color: $list-md-border-color !default;\n /// @prop - Icon for the detail arrow\n $item-md-detail-push-svg: \"<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 20'><path d='M2,20l-2-2l8-8L0,2l2-2l10,10L2,20z' fill='#{$item-md-detail-push-color}'/></svg>\" !default;\n \n-/// @prop - Color for the divider\n-$item-md-divider-color: #858585 !default;\n-\n-/// @prop - Background for the divider\n-$item-md-divider-background: #fff !default;\n-\n-/// @prop - Font size for the divider\n-$item-md-divider-font-size: $item-md-body-text-font-size !default;\n-\n-/// @prop - Border bottom for the divider\n-$item-md-divider-border-bottom: 1px solid $list-md-border-color !default;\n-\n-/// @prop - Padding for the divider\n-$item-md-divider-padding: 5px 15px !default;\n-\n \n .item-md {\n @include padding-horizontal($item-md-padding-start, 0);\ndiff --git a/packages/core/src/components/item/item.wp.scss b/packages/core/src/components/item/item.wp.scss\nindex 2c4aae6..07b9266 100644\n--- a/packages/core/src/components/item/item.wp.scss\n+++ b/packages/core/src/components/item/item.wp.scss\n@@ -41,21 +41,6 @@ $item-wp-detail-push-color: $input-wp-border-color !default;\n /// @prop - Icon for the detail arrow\n $item-wp-detail-push-svg: \"<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 12 20'><path d='M2,20l-2-2l8-8L0,2l2-2l10,10L2,20z' fill='#{$item-wp-detail-push-color}'/></svg>\" !default;\n \n-/// @prop - Color for the divider\n-$item-wp-divider-color: $list-wp-text-color !default;\n-\n-/// @prop - Background for the divider\n-$item-wp-divider-background: #fff !default;\n-\n-/// @prop - Bodrer bottom for the divider\n-$item-wp-divider-border-bottom: 1px solid $list-wp-border-color !default;\n-\n-/// @prop - Font size for the divider\n-$item-wp-divider-font-size: 2rem !default;\n-\n-/// @prop - Padding for the divider\n-$item-wp-divider-padding: 5px 15px !default;\n-\n \n .item-wp {\n @include padding-horizontal($item-wp-padding-start, 0);\n"]
| 1 |
["cd7e8c3d3549ea05115b3f02586eeba894d86906"]
|
["refactor"]
|
fix `memtable` docstrings
|
["diff --git a/ibis/expr/api.py b/ibis/expr/api.py\nindex 93fabaa..66a2ea9 100644\n--- a/ibis/expr/api.py\n+++ b/ibis/expr/api.py\n@@ -403,15 +403,21 @@ def memtable(\n >>> import ibis\n >>> t = ibis.memtable([{\"a\": 1}, {\"a\": 2}])\n >>> t\n+ PandasInMemoryTable\n+ data:\n+ DataFrameProxy:\n+ a\n+ 0 1\n+ 1 2\n \n >>> t = ibis.memtable([{\"a\": 1, \"b\": \"foo\"}, {\"a\": 2, \"b\": \"baz\"}])\n >>> t\n PandasInMemoryTable\n data:\n- ((1, 'foo'), (2, 'baz'))\n- schema:\n- a int8\n- b string\n+ DataFrameProxy:\n+ a b\n+ 0 1 foo\n+ 1 2 baz\n \n Create a table literal without column names embedded in the data and pass\n `columns`\n@@ -420,10 +426,22 @@ def memtable(\n >>> t\n PandasInMemoryTable\n data:\n- ((1, 'foo'), (2, 'baz'))\n- schema:\n- a int8\n- b string\n+ DataFrameProxy:\n+ a b\n+ 0 1 foo\n+ 1 2 baz\n+\n+ Create a table literal without column names embedded in the data. Ibis\n+ generates column names if none are provided.\n+\n+ >>> t = ibis.memtable([(1, \"foo\"), (2, \"baz\")])\n+ >>> t\n+ PandasInMemoryTable\n+ data:\n+ DataFrameProxy:\n+ col0 col1\n+ 0 1 foo\n+ 1 2 baz\n \"\"\"\n if columns is not None and schema is not None:\n raise NotImplementedError(\n"]
| 1 |
["72bc0f5172c0a3d17bde29cfc00db4c60d2fee3a"]
|
["docs"]
|
reintroduce timeout for assertion
The timeout had been removed by a previous commit. Without the timeout, the test might be flaky.
Also removed obsolete code.
|
["diff --git a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\nindex d0ee4f3..c2ab83c 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/processing/streamprocessor/StreamProcessorReplayModeTest.java\n@@ -13,6 +13,7 @@ import static io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent.ACTI\n import static io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent.ELEMENT_ACTIVATING;\n import static java.util.function.Predicate.isEqual;\n import static org.assertj.core.api.Assertions.assertThat;\n+import static org.awaitility.Awaitility.await;\n import static org.mockito.ArgumentMatchers.any;\n import static org.mockito.ArgumentMatchers.anyLong;\n import static org.mockito.ArgumentMatchers.eq;\n@@ -30,7 +31,6 @@ import io.camunda.zeebe.protocol.record.intent.ProcessInstanceIntent;\n import io.camunda.zeebe.streamprocessor.StreamProcessor;\n import io.camunda.zeebe.streamprocessor.StreamProcessor.Phase;\n import io.camunda.zeebe.streamprocessor.StreamProcessorMode;\n-import org.awaitility.Awaitility;\n import org.junit.Rule;\n import org.junit.Test;\n import org.mockito.InOrder;\n@@ -71,7 +71,7 @@ public final class StreamProcessorReplayModeTest {\n // when\n startStreamProcessor(replayUntilEnd);\n \n- Awaitility.await()\n+ await()\n .untilAsserted(\n () -> assertThat(getCurrentPhase(replayUntilEnd)).isEqualTo(Phase.PROCESSING));\n \n@@ -163,7 +163,7 @@ public final class StreamProcessorReplayModeTest {\n command().processInstance(ACTIVATE_ELEMENT, RECORD),\n event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n \n- Awaitility.await(\"should have replayed first events\")\n+ await(\"should have replayed first events\")\n .until(replayContinuously::getLastSuccessfulProcessedRecordPosition, (pos) -> pos > 0);\n \n // when\n@@ -210,7 +210,7 @@ public final class StreamProcessorReplayModeTest {\n command().processInstance(ACTIVATE_ELEMENT, RECORD),\n event().processInstance(ELEMENT_ACTIVATING, RECORD).causedBy(0));\n \n- Awaitility.await(\"should have replayed first events\")\n+ await(\"should have replayed first events\")\n .until(replayContinuously::getLastSuccessfulProcessedRecordPosition, (pos) -> pos > 0);\n streamProcessor.pauseProcessing().join();\n replayContinuously.writeBatch(\n@@ -244,7 +244,7 @@ public final class StreamProcessorReplayModeTest {\n // then\n verify(eventApplier, TIMEOUT).applyState(anyLong(), eq(ELEMENT_ACTIVATING), any());\n \n- Awaitility.await()\n+ await()\n .untilAsserted(\n () -> {\n final var lastProcessedPosition = getLastProcessedPosition(replayContinuously);\n@@ -273,8 +273,7 @@ public final class StreamProcessorReplayModeTest {\n \n verify(eventApplier, TIMEOUT).applyState(anyLong(), eq(ELEMENT_ACTIVATING), any());\n \n- Awaitility.await()\n- .until(() -> getLastProcessedPosition(replayContinuously), isEqual(commandPosition));\n+ await().until(() -> getLastProcessedPosition(replayContinuously), isEqual(commandPosition));\n \n // then\n assertThat(replayContinuously.getLastSuccessfulProcessedRecordPosition())\n@@ -285,7 +284,6 @@ public final class StreamProcessorReplayModeTest {\n @Test\n public void shouldNotSetLastProcessedPositionIfLessThanSnapshotPosition() {\n // given\n- final var commandPositionBeforeSnapshot = 1L;\n final var 
snapshotPosition = 2L;\n \n startStreamProcessor(replayContinuously);\n@@ -298,23 +296,20 @@ public final class StreamProcessorReplayModeTest {\n // when\n startStreamProcessor(replayContinuously);\n \n- Awaitility.await()\n+ await()\n .untilAsserted(\n () -> assertThat(getCurrentPhase(replayContinuously)).isEqualTo(Phase.REPLAY));\n \n- final var eventPosition =\n- replayContinuously.writeEvent(\n- ELEMENT_ACTIVATING,\n- RECORD,\n- writer -> writer.sourceRecordPosition(commandPositionBeforeSnapshot));\n-\n // then\n final var lastProcessedPositionState = replayContinuously.getLastProcessedPositionState();\n \n- assertThat(lastProcessedPositionState.getLastSuccessfulProcessedRecordPosition())\n- .describedAs(\n- \"Expected that the last processed position is not less than the snapshot position\")\n- .isEqualTo(snapshotPosition);\n+ await()\n+ .untilAsserted(\n+ () ->\n+ assertThat(lastProcessedPositionState.getLastSuccessfulProcessedRecordPosition())\n+ .describedAs(\n+ \"Expected that the last processed position is not less than the snapshot position\")\n+ .isEqualTo(snapshotPosition));\n }\n \n private StreamProcessor startStreamProcessor(final StreamProcessorRule streamProcessorRule) {\n"]
| 1 |
["0d23f1b3ed22e615b9611bb4eae01d2241e64dff"]
|
["refactor"]
|
init environ cache
|
["diff --git a/src/environment.go b/src/environment.go\nindex ae5e26a..0c961c5 100644\n--- a/src/environment.go\n+++ b/src/environment.go\n@@ -229,6 +229,7 @@ func (env *environment) environ() map[string]string {\n \tif env.environCache != nil {\n \t\treturn env.environCache\n \t}\n+\tenv.environCache = make(map[string]string)\n \tconst separator = \"=\"\n \tvalues := os.Environ()\n \tfor value := range values {\n"]
| 1 |
["dc50bd35462a49058c91a939fc8830ae7a9eb692"]
|
["fix"]
|
add LICENSE
|
["diff --git a/LICENSE b/LICENSE\nnew file mode 100644\nindex 0000000..005581d\n--- /dev/null\n+++ b/LICENSE\n@@ -0,0 +1,21 @@\n+MIT License\n+\n+Copyright (c) Hassan El Mghari\n+\n+Permission is hereby granted, free of charge, to any person obtaining a copy\n+of this software and associated documentation files (the \"Software\"), to deal\n+in the Software without restriction, including without limitation the rights\n+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n+copies of the Software, and to permit persons to whom the Software is\n+furnished to do so, subject to the following conditions:\n+\n+The above copyright notice and this permission notice shall be included in all\n+copies or substantial portions of the Software.\n+\n+THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\n+SOFTWARE.\n"]
| 1 |
["096145f0d32a6b351b1db413b04a685952f04fb3"]
|
["docs"]
|
add automation for rebasing `*.x.x` branches
|
["diff --git a/.github/workflows/ibis-rebase-nightly.yml b/.github/workflows/ibis-rebase-nightly.yml\nnew file mode 100644\nindex 0000000..3d2f064\n--- /dev/null\n+++ b/.github/workflows/ibis-rebase-nightly.yml\n@@ -0,0 +1,82 @@\n+name: Update Dependencies\n+on:\n+ schedule:\n+ # run every 24 hours at 1 AM\n+ - cron: \"1 */24 * * *\"\n+ workflow_dispatch:\n+\n+jobs:\n+ generate_branches:\n+ runs-on: ubuntu-latest\n+ outputs:\n+ matrix: ${{ steps.set-matrix.outputs.matrix }}\n+ steps:\n+ - name: output branches list\n+ id: set-matrix\n+ run: |\n+ set -euo pipefail\n+\n+ branches=$(git ls-remote --heads https://github.com/ibis-project/ibis.git \\\n+ | sed -e 's#\\t# #g' -e 's#refs/heads/##g' \\\n+ | cut -d ' ' -f2 \\\n+ | grep -P '\\d+\\.x\\.x' \\\n+ | xargs -I {} printf '\"%s\"' \\\n+ | jq -s '{branch: .}')\n+\n+ echo \"::set-output name=matrix::$branches\"\n+\n+ niv_update:\n+ runs-on: ubuntu-latest\n+ needs:\n+ - generate_branches\n+ strategy:\n+ matrix: ${{ fromJSON(needs.generate_branches.outputs.matrix) }}\n+ steps:\n+ - uses: actions/checkout@v3\n+\n+ - uses: tibdex/github-app-token@v1\n+ id: generate_pr_token\n+ with:\n+ app_id: ${{ secrets.SQUAWK_BOT_APP_ID }}\n+ private_key: ${{ secrets.SQUAWK_BOT_APP_PRIVATE_KEY }}\n+\n+ - uses: tibdex/github-app-token@v1\n+ id: generate_pr_approval_token\n+ with:\n+ app_id: ${{ secrets.PR_APPROVAL_BOT_APP_ID }}\n+ private_key: ${{ secrets.PR_APPROVAL_BOT_APP_PRIVATE_KEY }}\n+\n+ - uses: cpcloud/compare-commits-action@v5.0.19\n+ id: compare_commits\n+ with:\n+ token: ${{ steps.generate_pr_token.outputs.token }}\n+ owner: ibis-project\n+ repo: ibis\n+ basehead: ${{ github.sha }}...${{ steps.get_current_commit.outputs.rev }}\n+ include-merge-commits: false\n+\n+ - uses: peter-evans/create-pull-request@v4\n+ id: create_pr\n+ with:\n+ token: ${{ steps.generate_pr_token.outputs.token }}\n+ commit-message: \"chore(${{ matrix.branch }}): rebase onto upstream\"\n+ branch: \"create-pull-request/rebase-${{ matrix.branch }}\"\n+ base: ${{ matrix.branch }}\n+ delete-branch: true\n+ author: \"ibis-squawk-bot[bot] <ibis-squawk-bot[bot]@users.noreply.github.com>\"\n+ title: \"chore(${{ matrix.branch }}): rebase onto upstream\"\n+ body: ${{ steps.compare_commits.outputs.differences }}\n+ labels: dependencies\n+\n+ - uses: juliangruber/approve-pull-request-action@v1.1.1\n+ if: ${{ fromJSON(steps.create_pr.outputs.pull-request-number) != null }}\n+ with:\n+ github-token: ${{ steps.generate_pr_approval_token.outputs.token }}\n+ number: ${{ steps.create_pr.outputs.pull-request-number }}\n+\n+ - uses: peter-evans/enable-pull-request-automerge@v2\n+ if: ${{ fromJSON(steps.create_pr.outputs.pull-request-number) != null }}\n+ with:\n+ token: ${{ steps.generate_pr_token.outputs.token }}\n+ pull-request-number: ${{ steps.create_pr.outputs.pull-request-number }}\n+ merge-method: rebase\n"]
| 1 |
["e82254c4ca73fe55834f005f08bc2a060496f815"]
|
["cicd"]
|
rename top-level to connection
|
["diff --git a/docs/_quarto.yml b/docs/_quarto.yml\nindex 4e086c7..69471ee 100644\n--- a/docs/_quarto.yml\n+++ b/docs/_quarto.yml\n@@ -140,7 +140,7 @@ website:\n contents:\n - section: Expression API\n contents:\n- - reference/top_level.qmd\n+ - reference/connection.qmd\n - reference/expression-tables.qmd\n - reference/selectors.qmd\n - reference/expression-generic.qmd\n@@ -191,10 +191,10 @@ quartodoc:\n contents:\n - kind: page\n package: ibis\n- path: top_level\n+ path: connection\n summary:\n- name: Top-level APIs\n- desc: Methods and objects available directly on the `ibis` module.\n+ name: Connection API\n+ desc: Create and manage backend connections.\n contents:\n - name: connect\n package: ibis.backends.base\n"]
| 1 |
["9b9cd037645ec716a45b70137f8d2f01ec9ab90c"]
|
["docs"]
|
brew tests/multiple darwin builds/gh enterprise
|
["diff --git a/pipeline/brew/brew.go b/pipeline/brew/brew.go\nindex ec27182..15ed189 100644\n--- a/pipeline/brew/brew.go\n+++ b/pipeline/brew/brew.go\n@@ -1,5 +1,3 @@\n-// Package brew implements the Pipe, providing formula generation and\n-// uploading it to a configured repo.\n package brew\n \n import (\n@@ -10,13 +8,12 @@ import (\n \t\"strings\"\n \t\"text/template\"\n \n-\t\"github.com/goreleaser/goreleaser/internal/artifact\"\n-\n \t\"github.com/apex/log\"\n \n \t\"github.com/goreleaser/goreleaser/checksum\"\n \t\"github.com/goreleaser/goreleaser/config\"\n \t\"github.com/goreleaser/goreleaser/context\"\n+\t\"github.com/goreleaser/goreleaser/internal/artifact\"\n \t\"github.com/goreleaser/goreleaser/internal/client\"\n \t\"github.com/goreleaser/goreleaser/pipeline\"\n )\n@@ -106,14 +103,14 @@ func doRun(ctx *context.Context, client client.Client) error {\n \t\tartifact.And(\n \t\t\tartifact.ByGoos(\"darwin\"),\n \t\t\tartifact.ByGoarch(\"amd64\"),\n-\t\t\tartifact.ByGoarch(\"\"),\n+\t\t\tartifact.ByGoarm(\"\"),\n \t\t\tartifact.ByType(artifact.UploadableArchive),\n \t\t),\n \t).List()\n \tif len(archives) == 0 {\n \t\treturn ErrNoDarwin64Build\n \t}\n-\tif len(archives) > 0 {\n+\tif len(archives) > 1 {\n \t\treturn ErrTooManyDarwin64Builds\n \t}\n \tvar path = filepath.Join(ctx.Config.Brew.Folder, ctx.Config.ProjectName+\".rb\")\n@@ -145,8 +142,7 @@ func doBuildFormula(data templateData) (out bytes.Buffer, err error) {\n }\n \n func dataFor(ctx *context.Context, client client.Client, artifact artifact.Artifact) (result templateData, err error) {\n-\tvar file = artifact.Path\n-\tsum, err := checksum.SHA256(file)\n+\tsum, err := checksum.SHA256(artifact.Path)\n \tif err != nil {\n \t\treturn\n \t}\n@@ -163,7 +159,7 @@ func dataFor(ctx *context.Context, client client.Client, artifact artifact.Artif\n \t\tTag: ctx.Git.CurrentTag,\n \t\tVersion: ctx.Version,\n \t\tCaveats: ctx.Config.Brew.Caveats,\n-\t\tFile: file,\n+\t\tFile: artifact.Name,\n \t\tSHA256: sum,\n \t\tDependencies: ctx.Config.Brew.Dependencies,\n \t\tConflicts: ctx.Config.Brew.Conflicts,\ndiff --git a/pipeline/brew/brew_test.go b/pipeline/brew/brew_test.go\nindex 7e513bf..9066935 100644\n--- a/pipeline/brew/brew_test.go\n+++ b/pipeline/brew/brew_test.go\n@@ -9,6 +9,7 @@ import (\n \n \t\"github.com/goreleaser/goreleaser/config\"\n \t\"github.com/goreleaser/goreleaser/context\"\n+\t\"github.com/goreleaser/goreleaser/internal/artifact\"\n \t\"github.com/goreleaser/goreleaser/internal/testlib\"\n \t\"github.com/stretchr/testify/assert\"\n )\n@@ -93,7 +94,8 @@ func TestRunPipe(t *testing.T) {\n \t\tGit: context.GitInfo{\n \t\t\tCurrentTag: \"v1.0.1\",\n \t\t},\n-\t\tVersion: \"1.0.1\",\n+\t\tVersion: \"1.0.1\",\n+\t\tArtifacts: artifact.New(),\n \t\tConfig: config.Project{\n \t\t\tDist: folder,\n \t\t\tProjectName: \"run-pipe\",\n@@ -124,31 +126,53 @@ func TestRunPipe(t *testing.T) {\n \t\tPublish: true,\n \t}\n \tvar path = filepath.Join(folder, \"bin.tar.gz\")\n-\tctx.AddBinary(\"darwinamd64\", \"bin\", \"bin\", path)\n+\tctx.Artifacts.Add(artifact.Artifact{\n+\t\tName: \"bin.tar.gz\",\n+\t\tPath: path,\n+\t\tGoos: \"darwin\",\n+\t\tGoarch: \"amd64\",\n+\t\tType: artifact.UploadableArchive,\n+\t})\n \tclient := &DummyClient{}\n \tassert.Error(t, doRun(ctx, client))\n \tassert.False(t, client.CreatedFile)\n \n \t_, err = os.Create(path)\n \tassert.NoError(t, err)\n-\tassert.NoError(t, doRun(ctx, client))\n-\tassert.True(t, client.CreatedFile)\n \n-\tbts, err := ioutil.ReadFile(\"testdata/run_pipe.rb\")\n-\tassert.NoError(t, 
err)\n-\t// ioutil.WriteFile(\"testdata/run_pipe.rb\", []byte(client.Content), 0644)\n+\tt.Run(\"default git url\", func(tt *testing.T) {\n+\t\tassert.NoError(tt, doRun(ctx, client))\n+\t\tassert.True(tt, client.CreatedFile)\n+\n+\t\tbts, err := ioutil.ReadFile(\"testdata/run_pipe.rb\")\n+\t\tassert.NoError(tt, err)\n+\t\t// TODO: make writing this file toggleable somehow?\n+\t\t// ioutil.WriteFile(\"testdata/run_pipe.rb\", []byte(client.Content), 0644)\n+\t\tassert.Equal(tt, string(bts), client.Content)\n+\t})\n \n-\tassert.Equal(t, string(bts), client.Content)\n+\tt.Run(\"github enterprise url\", func(tt *testing.T) {\n+\t\tctx.Config.GitHubURLs.Download = \"http://github.example.org\"\n+\t\tassert.NoError(tt, doRun(ctx, client))\n+\t\tassert.True(tt, client.CreatedFile)\n+\n+\t\tbts, err := ioutil.ReadFile(\"testdata/run_pipe_enterprise.rb\")\n+\t\tassert.NoError(tt, err)\n+\t\t// TODO: make writing this file toggleable somehow?\n+\t\t// ioutil.WriteFile(\"testdata/run_pipe_enterprise.rb\", []byte(client.Content), 0644)\n+\t\tassert.Equal(tt, string(bts), client.Content)\n+\t})\n }\n \n+// TODO: this test is irrelevant and can probavly be removed\n func TestRunPipeFormatOverride(t *testing.T) {\n \tfolder, err := ioutil.TempDir(\"\", \"goreleasertest\")\n \tassert.NoError(t, err)\n \tvar path = filepath.Join(folder, \"bin.zip\")\n \t_, err = os.Create(path)\n \tassert.NoError(t, err)\n-\tvar ctx = &context.Context{\n-\t\tConfig: config.Project{\n+\tvar ctx = context.New(\n+\t\tconfig.Project{\n \t\t\tDist: folder,\n \t\t\tArchive: config.Archive{\n \t\t\t\tFormat: \"tar.gz\",\n@@ -166,9 +190,15 @@ func TestRunPipeFormatOverride(t *testing.T) {\n \t\t\t\t},\n \t\t\t},\n \t\t},\n-\t\tPublish: true,\n-\t}\n-\tctx.AddBinary(\"darwinamd64\", \"bin\", \"bin\", path)\n+\t)\n+\tctx.Publish = true\n+\tctx.Artifacts.Add(artifact.Artifact{\n+\t\tName: \"bin.zip\",\n+\t\tPath: path,\n+\t\tGoos: \"darwin\",\n+\t\tGoarch: \"amd64\",\n+\t\tType: artifact.UploadableArchive,\n+\t})\n \tclient := &DummyClient{}\n \tassert.NoError(t, doRun(ctx, client))\n \tassert.True(t, client.CreatedFile)\n@@ -195,6 +225,40 @@ func TestRunPipeNoDarwin64Build(t *testing.T) {\n \tassert.False(t, client.CreatedFile)\n }\n \n+func TestRunPipeMultipleDarwin64Build(t *testing.T) {\n+\tvar ctx = context.New(\n+\t\tconfig.Project{\n+\t\t\tArchive: config.Archive{\n+\t\t\t\tFormat: \"tar.gz\",\n+\t\t\t},\n+\t\t\tBrew: config.Homebrew{\n+\t\t\t\tGitHub: config.Repo{\n+\t\t\t\t\tOwner: \"test\",\n+\t\t\t\t\tName: \"test\",\n+\t\t\t\t},\n+\t\t\t},\n+\t\t},\n+\t)\n+\tctx.Publish = true\n+\tctx.Artifacts.Add(artifact.Artifact{\n+\t\tName: \"bin1\",\n+\t\tPath: \"doesnt mather\",\n+\t\tGoos: \"darwin\",\n+\t\tGoarch: \"amd64\",\n+\t\tType: artifact.UploadableArchive,\n+\t})\n+\tctx.Artifacts.Add(artifact.Artifact{\n+\t\tName: \"bin2\",\n+\t\tPath: \"doesnt mather\",\n+\t\tGoos: \"darwin\",\n+\t\tGoarch: \"amd64\",\n+\t\tType: artifact.UploadableArchive,\n+\t})\n+\tclient := &DummyClient{}\n+\tassert.Equal(t, ErrTooManyDarwin64Builds, doRun(ctx, client))\n+\tassert.False(t, client.CreatedFile)\n+}\n+\n func TestRunPipeBrewNotSetup(t *testing.T) {\n \tvar ctx = &context.Context{\n \t\tConfig: config.Project{},\n@@ -206,9 +270,8 @@ func TestRunPipeBrewNotSetup(t *testing.T) {\n }\n \n func TestRunPipeBinaryRelease(t *testing.T) {\n-\tvar ctx = &context.Context{\n-\t\tPublish: true,\n-\t\tConfig: config.Project{\n+\tvar ctx = context.New(\n+\t\tconfig.Project{\n \t\t\tArchive: config.Archive{\n \t\t\t\tFormat: \"binary\",\n \t\t\t},\n@@ -219,8 +282,15 @@ func TestRunPipeBinaryRelease(t *testing.T) {\n \t\t\t\t},\n \t\t\t},\n \t\t},\n-\t}\n-\tctx.AddBinary(\"darwinamd64\", \"foo\", \"bar\", \"baz\")\n+\t)\n+\tctx.Publish = true\n+\tctx.Artifacts.Add(artifact.Artifact{\n+\t\tName: \"bin\",\n+\t\tPath: \"doesnt mather\",\n+\t\tGoos: \"darwin\",\n+\t\tGoarch: \"amd64\",\n+\t\tType: artifact.Binary,\n+\t})\n \tclient := &DummyClient{}\n \ttestlib.AssertSkipped(t, doRun(ctx, client))\n \tassert.False(t, client.CreatedFile)\ndiff --git a/pipeline/brew/doc.go b/pipeline/brew/doc.go\nnew file mode 100644\nindex 0000000..2cddc12\n--- /dev/null\n+++ b/pipeline/brew/doc.go\n@@ -0,0 +1,3 @@\n+// Package brew implements the Pipe, providing formula generation and\n+// uploading it to a configured repo.\n+package brew\ndiff --git a/pipeline/brew/testdata/run_pipe_enterprise.rb b/pipeline/brew/testdata/run_pipe_enterprise.rb\nnew file mode 100644\nindex 0000000..4b24ce0\n--- /dev/null\n+++ b/pipeline/brew/testdata/run_pipe_enterprise.rb\n@@ -0,0 +1,33 @@\n+class RunPipe < Formula\n+ desc \"A run pipe test formula\"\n+ homepage \"https://github.com/goreleaser\"\n+ url \"http://github.example.org/test/test/releases/download/v1.0.1/bin.tar.gz\"\n+ version \"1.0.1\"\n+ sha256 \"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855\"\n+ \n+ depends_on \"zsh\"\n+ depends_on \"bash\"\n+ \n+ conflicts_with \"gtk+\"\n+ conflicts_with \"qt\"\n+\n+ def install\n+ bin.install \"foo\"\n+ end\n+\n+ def caveats\n+ \"don't do this\"\n+ end\n+\n+ plist_options :startup => false\n+\n+ def plist; <<-EOS.undent\n+ <xml>whatever</xml>\n+ EOS\n+ end\n+\n+ test do\n+ system \"true\"\n+ system \"#{bin}/foo -h\"\n+ end\n+end\n"]
| 1 |
["f433bcb59c36571e22d4e86c612e0a6a52f73c09"]
|
["feat"]
|
use lambda to define backend operations
|
["diff --git a/ibis/backends/duckdb/registry.py b/ibis/backends/duckdb/registry.py\nindex 20ffd6f..3f56f2a 100644\n--- a/ibis/backends/duckdb/registry.py\n+++ b/ibis/backends/duckdb/registry.py\n@@ -107,28 +107,13 @@ def _literal(_, op):\n return sa.cast(sa.literal(value), sqla_type)\n \n \n-def _array_column(t, op):\n- (arg,) = op.args\n- sqla_type = to_sqla_type(op.output_dtype)\n- return sa.cast(sa.func.list_value(*map(t.translate, arg)), sqla_type)\n-\n-\n def _neg_idx_to_pos(array, idx):\n if_ = getattr(sa.func, \"if\")\n arg_length = sa.func.array_length(array)\n return if_(idx < 0, arg_length + sa.func.greatest(idx, -arg_length), idx)\n \n \n-def _struct_field(t, op):\n- return sa.func.struct_extract(\n- t.translate(op.arg),\n- sa.text(repr(op.field)),\n- type_=to_sqla_type(op.output_dtype),\n- )\n-\n-\n-def _regex_extract(t, op):\n- string, pattern, index = map(t.translate, op.args)\n+def _regex_extract(string, pattern, index):\n result = sa.case(\n [\n (\n@@ -149,8 +134,7 @@ def _regex_extract(t, op):\n return result\n \n \n-def _json_get_item(t, op):\n- left, path = map(t.translate, op.args)\n+def _json_get_item(left, path):\n # Workaround for https://github.com/duckdb/duckdb/issues/5063\n # In some situations duckdb silently does the wrong thing if\n # the path is parametrized.\n@@ -197,7 +181,12 @@ def _struct_column(t, op):\n \n operation_registry.update(\n {\n- ops.ArrayColumn: _array_column,\n+ ops.ArrayColumn: (\n+ lambda t, op: sa.cast(\n+ sa.func.list_value(*map(t.translate, op.cols)),\n+ to_sqla_type(op.output_dtype),\n+ )\n+ ),\n ops.ArrayConcat: fixed_arity(sa.func.array_concat, 2),\n ops.ArrayRepeat: fixed_arity(\n lambda arg, times: sa.func.flatten(\n@@ -222,7 +211,13 @@ operation_registry.update(\n # TODO: map operations, but DuckDB's maps are multimaps\n ops.Modulus: fixed_arity(operator.mod, 2),\n ops.Round: _round,\n- ops.StructField: _struct_field,\n+ ops.StructField: (\n+ lambda t, op: sa.func.struct_extract(\n+ t.translate(op.arg),\n+ sa.text(repr(op.field)),\n+ type_=to_sqla_type(op.output_dtype),\n+ )\n+ ),\n ops.TableColumn: _table_column,\n ops.TimestampDiff: fixed_arity(sa.func.age, 2),\n ops.TimestampFromUNIX: _timestamp_from_unix,\n@@ -232,7 +227,7 @@ operation_registry.update(\n lambda *_: sa.cast(sa.func.now(), sa.TIMESTAMP),\n 0,\n ),\n- ops.RegexExtract: _regex_extract,\n+ ops.RegexExtract: fixed_arity(_regex_extract, 3),\n ops.RegexReplace: fixed_arity(\n lambda *args: sa.func.regexp_replace(*args, \"g\"), 3\n ),\n@@ -255,7 +250,7 @@ operation_registry.update(\n ops.ArgMin: reduction(sa.func.min_by),\n ops.ArgMax: reduction(sa.func.max_by),\n ops.BitwiseXor: fixed_arity(sa.func.xor, 2),\n- ops.JSONGetItem: _json_get_item,\n+ ops.JSONGetItem: fixed_arity(_json_get_item, 2),\n ops.RowID: lambda *_: sa.literal_column('rowid'),\n ops.StringToTimestamp: fixed_arity(sa.func.strptime, 2),\n }\n"]
| 1 |
["5d14de6722eb34c6604a124f6f11cb711f16bd44"]
|
["refactor"]
|
svg helper
|
["diff --git a/src/__test__/m.spec.ts b/src/__test__/m.spec.ts\nindex 0f9cb63..ff5e5f0 100644\n--- a/src/__test__/m.spec.ts\n+++ b/src/__test__/m.spec.ts\n@@ -1,4 +1,4 @@\n-import { className, m, ns, style } from '../m';\n+import { className, m, ns, svg, style } from '../m';\n import { VNode, VProps } from '../structs';\n \n const h = (tag: string, props?: VProps, ...children: VNode[]) =>\n@@ -173,6 +173,28 @@ describe('.m', () => {\n });\n });\n \n+ it('should attach ns to props using svg helper', () => {\n+ const vnode = {\n+ tag: 'svg',\n+ children: [\n+ 'foo',\n+ {\n+ tag: 'div',\n+ },\n+ ],\n+ };\n+ expect(svg(vnode)).toEqual({\n+ tag: 'svg',\n+ props: { ns: 'http://www.w3.org/2000/svg' },\n+ children: [\n+ 'foo',\n+ {\n+ tag: 'div',\n+ },\n+ ],\n+ });\n+ });\n+\n it('should move key to distinct property', () => {\n expect(h('div', { key: 'foo' }, 'foo', h('div'))).toEqual({\n tag: 'div',\n"]
| 1 |
["4aa3e4c438742ef0fe694ffaf6a181874366d777"]
|
["test"]
|
add title to badge icon
|
["diff --git a/kibbeh/src/modules/room/chat/RoomChatList.tsx b/kibbeh/src/modules/room/chat/RoomChatList.tsx\nindex a7418e6..805a9a4 100644\n--- a/kibbeh/src/modules/room/chat/RoomChatList.tsx\n+++ b/kibbeh/src/modules/room/chat/RoomChatList.tsx\n@@ -16,6 +16,11 @@ interface ChatListProps {\n users: RoomUser[];\n }\n \n+interface BadgeIconData {\n+ emoji: string,\n+ title: string\n+}\n+\n export const RoomChatList: React.FC<ChatListProps> = ({ room, users }) => {\n const { setData } = useContext(UserPreviewModalContext);\n const { messages, toggleFrozen } = useRoomChatStore();\n@@ -48,11 +53,14 @@ export const RoomChatList: React.FC<ChatListProps> = ({ room, users }) => {\n const getBadgeIcon = (m: Message) => {\n const user = users.find((u) => u.id === m.userId);\n const isSpeaker = room.creatorId === user?.id || user?.roomPermissions?.isSpeaker;\n- let emoji = null;\n+ let badgeIconData: BadgeIconData | null = null;\n if (isSpeaker) {\n- emoji = \"\ud83d\udce3\";\n+ badgeIconData = {\n+ emoji: \"\ud83d\udce3\",\n+ title: \"Speaker\"\n+ };\n }\n- return emoji && <Twemoji text={emoji} style={{ marginRight: \"1ch\" }}/>;\n+ return badgeIconData && <Twemoji text={badgeIconData.emoji} title={badgeIconData.title} style={{ marginRight: \"1ch\" }}/>;\n };\n \n return (\n"]
| 1 |
["6e5098655e6d9bb13f6423abe780cdf6b50ff13a"]
|
["feat"]
|
new ShowDebug parameter
calculate each segment timing
new parameter to show/hide segment debug information
set-poshprompt updated with the new showDebug parameter
Force disabled segment to be visible for debug purpose
|
["diff --git a/engine.go b/engine.go\nindex 6cc1ff3..4617ceb 100644\n--- a/engine.go\n+++ b/engine.go\n@@ -67,6 +67,9 @@ func (e *engine) renderText(text string) {\n \tprefix := e.activeSegment.getValue(Prefix, \" \")\n \tpostfix := e.activeSegment.getValue(Postfix, \" \")\n \te.renderer.write(e.activeSegment.Background, e.activeSegment.Foreground, fmt.Sprintf(\"%s%s%s\", prefix, text, postfix))\n+\tif *e.env.getArgs().Debug {\n+\t\te.renderer.write(e.activeSegment.Background, e.activeSegment.Foreground, fmt.Sprintf(\"(%s:%s)\", e.activeSegment.Type, e.activeSegment.timing))\n+\t}\n }\n \n func (e *engine) renderSegmentText(text string) {\n@@ -107,13 +110,11 @@ func (e *engine) setStringValues(segments []*Segment) {\n \twg.Add(len(segments))\n \tdefer wg.Wait()\n \tcwd := e.env.getcwd()\n+\tdebug := *e.env.getArgs().Debug\n \tfor _, segment := range segments {\n \t\tgo func(s *Segment) {\n \t\t\tdefer wg.Done()\n-\t\t\terr := s.mapSegmentWithWriter(e.env)\n-\t\t\tif err == nil && !s.hasValue(IgnoreFolders, cwd) && s.enabled() {\n-\t\t\t\ts.stringValue = s.string()\n-\t\t\t}\n+\t\t\ts.setStringValue(e.env, cwd, debug)\n \t\t}(segment)\n \t}\n }\ndiff --git a/main.go b/main.go\nindex 56ae8a5..d67a640 100644\n--- a/main.go\n+++ b/main.go\n@@ -14,6 +14,7 @@ type args struct {\n \tConfig *string\n \tShell *string\n \tPWD *string\n+\tDebug *bool\n }\n \n func main() {\n@@ -42,6 +43,10 @@ func main() {\n \t\t\t\"pwd\",\n \t\t\t\"\",\n \t\t\t\"the path you are working in\"),\n+\t\tDebug: flag.Bool(\n+\t\t\t\"debug\",\n+\t\t\tfalse,\n+\t\t\t\"Print debug information\"),\n \t}\n \tflag.Parse()\n \tenv := &environment{\ndiff --git a/packages/powershell/oh-my-posh/oh-my-posh.psm1 b/packages/powershell/oh-my-posh/oh-my-posh.psm1\nindex 9234fc6..1450eb3 100644\n--- a/packages/powershell/oh-my-posh/oh-my-posh.psm1\n+++ b/packages/powershell/oh-my-posh/oh-my-posh.psm1\n@@ -5,6 +5,7 @@\n \n $global:PoshSettings = New-Object -TypeName PSObject -Property @{\n Theme = \"$PSScriptRoot\\themes\\jandedobbeleer.json\";\n+ ShowDebug = $false\n }\n \n function Get-PoshCommand {\n@@ -36,9 +37,14 @@ function Set-PoshPrompt {\n param(\n [Parameter(Mandatory = $false)]\n [string]\n- $Theme\n+ $Theme,\n+ [Parameter(Mandatory = $false)]\n+ [bool]\n+ $ShowDebug = $false\n )\n \n+ $global:PoshSettings.ShowDebug = $ShowDebug\n+\n if (Test-Path \"$PSScriptRoot/themes/$Theme.json\") {\n $global:PoshSettings.Theme = \"$PSScriptRoot/themes/$Theme.json\"\n }\n@@ -68,8 +74,9 @@ function Set-PoshPrompt {\n $startInfo = New-Object System.Diagnostics.ProcessStartInfo\n $startInfo.FileName = Get-PoshCommand\n $config = $global:PoshSettings.Theme\n+ $showDebug = $global:PoshSettings.ShowDebug\n $cleanPWD = $PWD.ProviderPath.TrimEnd(\"\\\")\n- $startInfo.Arguments = \"-config=\"\"$config\"\" -error=$errorCode -pwd=\"\"$cleanPWD\"\"\"\n+ $startInfo.Arguments = \"-debug=\"\"$showDebug\"\" -config=\"\"$config\"\" -error=$errorCode -pwd=\"\"$cleanPWD\"\"\"\n $startInfo.Environment[\"TERM\"] = \"xterm-256color\"\n $startInfo.CreateNoWindow = $true\n $startInfo.StandardOutputEncoding = [System.Text.Encoding]::UTF8\ndiff --git a/segment.go b/segment.go\nindex 27dd416..4015dac 100644\n--- a/segment.go\n+++ b/segment.go\n@@ -1,6 +1,9 @@\n package main\n \n-import \"errors\"\n+import (\n+\t\"errors\"\n+\t\"time\"\n+)\n \n // Segment represent a single segment and it's configuration\n type Segment struct {\n@@ -17,6 +20,7 @@ type Segment struct {\n \twriter SegmentWriter\n \tstringValue string\n \tactive bool\n+\ttiming time.Duration\n }\n \n // 
SegmentWriter is the interface used to define what and if to write to the prompt\n@@ -149,3 +153,26 @@ func (segment *Segment) mapSegmentWithWriter(env environmentInfo) error {\n \t}\n \treturn errors.New(\"unable to map writer\")\n }\n+\n+func (segment *Segment) setStringValue(env environmentInfo, cwd string, debug bool) {\n+\terr := segment.mapSegmentWithWriter(env)\n+\tif err != nil || segment.hasValue(IgnoreFolders, cwd) {\n+\t\treturn\n+\t}\n+\t// add timing only in debug\n+\tif debug {\n+\t\tstart := time.Now()\n+\t\tdefer (func() {\n+\t\t\t// force segment rendering to display the time it took\n+\t\t\t// to check if the segment is enabled or not\n+\t\t\t// depending on the segement, calling enabled()\n+\t\t\t// can be time consuming\n+\t\t\tsegment.active = true\n+\t\t\telapsed := time.Since(start)\n+\t\t\tsegment.timing = elapsed\n+\t\t})()\n+\t}\n+\tif segment.enabled() {\n+\t\tsegment.stringValue = segment.string()\n+\t}\n+}\n"]
| 1 |
["bea32587586ca08f390c901a95e9b9c25263f4df"]
|
["feat"]
|
extract _value expr from predicate
|
["diff --git a/predicate/src/delete_predicate.rs b/predicate/src/delete_predicate.rs\nindex 02e679a..6368df3 100644\n--- a/predicate/src/delete_predicate.rs\n+++ b/predicate/src/delete_predicate.rs\n@@ -120,6 +120,7 @@ impl From<DeletePredicate> for crate::predicate::Predicate {\n partition_key: None,\n range: Some(pred.range),\n exprs: pred.exprs.into_iter().map(|expr| expr.into()).collect(),\n+ value_expr: vec![],\n }\n }\n }\ndiff --git a/predicate/src/predicate.rs b/predicate/src/predicate.rs\nindex d7f3b62..2aa8fdf 100644\n--- a/predicate/src/predicate.rs\n+++ b/predicate/src/predicate.rs\n@@ -11,7 +11,7 @@ use std::{\n use data_types::timestamp::TimestampRange;\n use datafusion::{\n error::DataFusionError,\n- logical_plan::{col, lit_timestamp_nano, Expr, Operator},\n+ logical_plan::{col, lit_timestamp_nano, Column, Expr, Operator},\n optimizer::utils,\n };\n use datafusion_util::{make_range_expr, AndExprBuilder};\n@@ -26,6 +26,7 @@ pub const EMPTY_PREDICATE: Predicate = Predicate {\n exprs: vec![],\n range: None,\n partition_key: None,\n+ value_expr: vec![],\n };\n \n #[derive(Debug, Clone, Copy)]\n@@ -72,6 +73,11 @@ pub struct Predicate {\n /// these expressions should be returned. Other rows are excluded\n /// from the results.\n pub exprs: Vec<Expr>,\n+\n+ /// Optional arbitrary predicates on the special `_value` column. These\n+ /// expressions are applied to `field_columns` projections in the form of\n+ /// `CASE` statement conditions.\n+ pub value_expr: Vec<BinaryExpr>,\n }\n \n impl Predicate {\n@@ -469,6 +475,14 @@ impl PredicateBuilder {\n }\n }\n \n+// A representation of the `BinaryExpr` variant of a Datafusion expression.\n+#[derive(Clone, Debug, PartialEq, PartialOrd)]\n+pub struct BinaryExpr {\n+ pub left: Column,\n+ pub op: Operator,\n+ pub right: Expr,\n+}\n+\n #[cfg(test)]\n mod tests {\n use super::*;\ndiff --git a/query/src/frontend/influxrpc.rs b/query/src/frontend/influxrpc.rs\nindex 5ac7a2e..70c43f2 100644\n--- a/query/src/frontend/influxrpc.rs\n+++ b/query/src/frontend/influxrpc.rs\n@@ -9,8 +9,7 @@ use data_types::chunk_metadata::ChunkId;\n use datafusion::{\n error::{DataFusionError, Result as DatafusionResult},\n logical_plan::{\n- binary_expr, lit, Column, DFSchemaRef, Expr, ExprRewriter, LogicalPlan, LogicalPlanBuilder,\n- Operator,\n+ lit, Column, DFSchemaRef, Expr, ExprRewriter, LogicalPlan, LogicalPlanBuilder, Operator,\n },\n optimizer::utils::expr_to_columns,\n prelude::col,\n@@ -20,7 +19,7 @@ use datafusion_util::AsExpr;\n \n use hashbrown::{HashMap, HashSet};\n use observability_deps::tracing::{debug, trace};\n-use predicate::predicate::{Predicate, PredicateMatch};\n+use predicate::predicate::{BinaryExpr, Predicate, PredicateMatch};\n use schema::selection::Selection;\n use schema::{InfluxColumnType, Schema, TIME_COLUMN_NAME};\n use snafu::{ensure, OptionExt, ResultExt, Snafu};\n@@ -243,7 +242,6 @@ impl InfluxRpcPlanner {\n // and which chunks needs full plan and group them into their table\n for chunk in database.chunks(normalizer.unnormalized()) {\n let table_name = chunk.table_name();\n- let schema = chunk.schema();\n \n // Table is already in the returned table list, no longer needs to discover it from other chunks\n if builder.contains_meta_data_table(table_name.to_string()) {\n@@ -260,7 +258,7 @@ impl InfluxRpcPlanner {\n } else {\n // See if we can have enough info from the chunk's meta data to answer\n // that this table participates in the request\n- let predicate = normalizer.normalized(table_name, schema);\n+ let predicate = 
normalizer.normalized(table_name);\n //\n // Try and apply the predicate using only metadata\n let pred_result = chunk\n@@ -346,7 +344,7 @@ impl InfluxRpcPlanner {\n let mut do_full_plan = chunk.has_delete_predicates();\n \n let table_name = chunk.table_name();\n- let predicate = normalizer.normalized(table_name, chunk.schema());\n+ let predicate = normalizer.normalized(table_name);\n \n // Try and apply the predicate using only metadata\n let pred_result = chunk\n@@ -474,7 +472,7 @@ impl InfluxRpcPlanner {\n let mut do_full_plan = chunk.has_delete_predicates();\n \n let table_name = chunk.table_name();\n- let predicate = normalizer.normalized(table_name, chunk.schema());\n+ let predicate = normalizer.normalized(table_name);\n \n // Try and apply the predicate using only metadata\n let pred_result = chunk\n@@ -821,7 +819,7 @@ impl InfluxRpcPlanner {\n {\n let mut table_chunks = BTreeMap::new();\n for chunk in chunks {\n- let predicate = normalizer.normalized(chunk.table_name(), chunk.schema());\n+ let predicate = normalizer.normalized(chunk.table_name());\n // Try and apply the predicate using only metadata\n let pred_result = chunk\n .apply_predicate_to_metadata(&predicate)\n@@ -1040,9 +1038,8 @@ impl InfluxRpcPlanner {\n C: QueryChunk + 'static,\n {\n let table_name = table_name.as_ref();\n- let scan_and_filter =\n- self.scan_and_filter(table_name, Arc::clone(&schema), normalizer, chunks)?;\n- let predicate = normalizer.normalized(table_name, schema);\n+ let scan_and_filter = self.scan_and_filter(table_name, schema, normalizer, chunks)?;\n+ let predicate = normalizer.normalized(table_name);\n \n let TableScanAndFilter {\n plan_builder,\n@@ -1152,9 +1149,8 @@ impl InfluxRpcPlanner {\n C: QueryChunk + 'static,\n {\n let table_name = table_name.into();\n- let scan_and_filter =\n- self.scan_and_filter(&table_name, Arc::clone(&schema), normalizer, chunks)?;\n- let predicate = normalizer.normalized(&table_name, schema);\n+ let scan_and_filter = self.scan_and_filter(&table_name, schema, normalizer, chunks)?;\n+ let predicate = normalizer.normalized(&table_name);\n \n let TableScanAndFilter {\n plan_builder,\n@@ -1263,9 +1259,8 @@ impl InfluxRpcPlanner {\n C: QueryChunk + 'static,\n {\n let table_name = table_name.into();\n- let scan_and_filter =\n- self.scan_and_filter(&table_name, Arc::clone(&schema), normalizer, chunks)?;\n- let predicate = normalizer.normalized(&table_name, schema);\n+ let scan_and_filter = self.scan_and_filter(&table_name, schema, normalizer, chunks)?;\n+ let predicate = normalizer.normalized(&table_name);\n \n let TableScanAndFilter {\n plan_builder,\n@@ -1342,7 +1337,7 @@ impl InfluxRpcPlanner {\n where\n C: QueryChunk + 'static,\n {\n- let predicate = normalizer.normalized(table_name, Arc::clone(&schema));\n+ let predicate = normalizer.normalized(table_name);\n \n // Scan all columns to begin with (DataFusion projection\n // push-down optimization will prune out unneeded columns later)\n@@ -1701,13 +1696,13 @@ impl PredicateNormalizer {\n \n /// Return a reference to a predicate specialized for `table_name` based on\n /// its `schema`.\n- fn normalized(&mut self, table_name: &str, schema: Arc<Schema>) -> Arc<Predicate> {\n+ fn normalized(&mut self, table_name: &str) -> Arc<Predicate> {\n if let Some(normalized_predicate) = self.normalized.get(table_name) {\n return normalized_predicate.inner();\n }\n \n let normalized_predicate =\n- TableNormalizedPredicate::new(table_name, schema, self.unnormalized.clone());\n+ TableNormalizedPredicate::new(table_name, 
self.unnormalized.clone());\n \n self.normalized\n .entry(table_name.to_string())\n@@ -1752,13 +1747,18 @@ struct TableNormalizedPredicate {\n }\n \n impl TableNormalizedPredicate {\n- fn new(table_name: &str, schema: Arc<Schema>, mut inner: Predicate) -> Self {\n+ fn new(table_name: &str, mut inner: Predicate) -> Self {\n let mut field_projections = BTreeSet::new();\n+ let mut field_value_exprs = vec![];\n+\n inner.exprs = inner\n .exprs\n .into_iter()\n .map(|e| rewrite_measurement_references(table_name, e))\n- .map(|e| rewrite_field_value_references(Arc::clone(&schema), e))\n+ // Rewrite any references to `_value = some_value` to literal true values.\n+ // Keeps track of these expressions, which can then be used to\n+ // augment field projections with conditions using `CASE` statements.\n+ .map(|e| rewrite_field_value_references(&mut field_value_exprs, e))\n .map(|e| {\n // Rewrite any references to `_field = a_field_name` with a literal true\n // and keep track of referenced field names to add to the field\n@@ -1766,6 +1766,8 @@ impl TableNormalizedPredicate {\n rewrite_field_column_references(&mut field_projections, e)\n })\n .collect::<Vec<_>>();\n+ // Store any field value (`_value`) expressions on the `Predicate`.\n+ inner.value_expr = field_value_exprs;\n \n if !field_projections.is_empty() {\n match &mut inner.field_columns {\n@@ -1811,23 +1813,19 @@ impl ExprRewriter for MeasurementRewriter<'_> {\n }\n }\n \n-/// Rewrites a predicate on `_value` to a disjunctive set of expressions on each\n-/// distinct field column in the table.\n-///\n-/// For example, the predicate `_value = 1.77` on a table with three field\n-/// columns would be rewritten to:\n-///\n-/// `(field1 = 1.77 OR field2 = 1.77 OR field3 = 1.77)`.\n-fn rewrite_field_value_references(schema: Arc<Schema>, expr: Expr) -> Expr {\n- let mut rewriter = FieldValueRewriter { schema };\n+/// Rewrites an expression on `_value` as a boolean true literal, pushing any\n+/// encountered expressions onto `value_exprs` so they can be moved onto column\n+/// projections.\n+fn rewrite_field_value_references(value_exprs: &mut Vec<BinaryExpr>, expr: Expr) -> Expr {\n+ let mut rewriter = FieldValueRewriter { value_exprs };\n expr.rewrite(&mut rewriter).expect(\"rewrite is infallible\")\n }\n \n-struct FieldValueRewriter {\n- schema: Arc<Schema>,\n+struct FieldValueRewriter<'a> {\n+ value_exprs: &'a mut Vec<BinaryExpr>,\n }\n \n-impl ExprRewriter for FieldValueRewriter {\n+impl<'a> ExprRewriter for FieldValueRewriter<'a> {\n fn mutate(&mut self, expr: Expr) -> DatafusionResult<Expr> {\n Ok(match expr {\n Expr::BinaryExpr {\n@@ -1836,21 +1834,16 @@ impl ExprRewriter for FieldValueRewriter {\n ref right,\n } => {\n if let Expr::Column(inner) = &**left {\n- if inner.name != VALUE_COLUMN_NAME {\n- return Ok(expr); // column name not `_value`.\n+ if inner.name == VALUE_COLUMN_NAME {\n+ self.value_exprs.push(BinaryExpr {\n+ left: inner.to_owned(),\n+ op,\n+ right: right.as_expr(),\n+ });\n+ return Ok(Expr::Literal(ScalarValue::Boolean(Some(true))));\n }\n-\n- // build a disjunctive expression using binary expressions\n- // for each field column and the original expression's\n- // operator and rhs.\n- self.schema\n- .fields_iter()\n- .map(|field| binary_expr(col(field.name()), op, *right.clone()))\n- .reduce(|a, b| a.or(b))\n- .expect(\"at least one field column\")\n- } else {\n- expr\n }\n+ expr\n }\n _ => expr,\n })\n@@ -1918,7 +1911,7 @@ pub fn schema_has_all_expr_columns(schema: &Schema, expr: &Expr) -> bool {\n \n #[cfg(test)]\n mod tests 
{\n- use datafusion::logical_plan::Operator;\n+ use datafusion::logical_plan::{binary_expr, Operator};\n use schema::builder::SchemaBuilder;\n \n use super::*;\n@@ -1958,56 +1951,57 @@ mod tests {\n \n #[test]\n fn test_field_value_rewriter() {\n- let schema = SchemaBuilder::new()\n- .tag(\"t1\")\n- .tag(\"t2\")\n- .field(\"f1\", DataType::Float64)\n- .field(\"f2\", DataType::Float64)\n- .timestamp()\n- .build()\n- .unwrap();\n-\n let mut rewriter = FieldValueRewriter {\n- schema: Arc::new(schema),\n+ value_exprs: &mut vec![],\n };\n \n let cases = vec![\n (\n binary_expr(col(\"f1\"), Operator::Eq, lit(1.82)),\n binary_expr(col(\"f1\"), Operator::Eq, lit(1.82)),\n+ vec![],\n ),\n- (col(\"t2\"), col(\"t2\")),\n+ (col(\"t2\"), col(\"t2\"), vec![]),\n (\n binary_expr(col(VALUE_COLUMN_NAME), Operator::Eq, lit(1.82)),\n- //\n- // _value = 1.82 -> f1 = (1.82 OR f2 = 1.82)\n- //\n- binary_expr(\n- binary_expr(col(\"f1\"), Operator::Eq, lit(1.82)),\n- Operator::Or,\n- binary_expr(col(\"f2\"), Operator::Eq, lit(1.82)),\n- ),\n+ // _value = 1.82 -> true\n+ lit(true),\n+ vec![BinaryExpr {\n+ left: Column {\n+ relation: None,\n+ name: VALUE_COLUMN_NAME.into(),\n+ },\n+ op: Operator::Eq,\n+ right: lit(1.82),\n+ }],\n ),\n ];\n \n- for (input, exp) in cases {\n+ for (input, exp, mut value_exprs) in cases {\n let rewritten = input.rewrite(&mut rewriter).unwrap();\n assert_eq!(rewritten, exp);\n+ assert_eq!(rewriter.value_exprs, &mut value_exprs);\n }\n \n // Test case with single field.\n- let schema = SchemaBuilder::new()\n- .field(\"f1\", DataType::Float64)\n- .timestamp()\n- .build()\n- .unwrap();\n let mut rewriter = FieldValueRewriter {\n- schema: Arc::new(schema),\n+ value_exprs: &mut vec![],\n };\n \n let input = binary_expr(col(VALUE_COLUMN_NAME), Operator::Gt, lit(1.88));\n let rewritten = input.rewrite(&mut rewriter).unwrap();\n- assert_eq!(rewritten, binary_expr(col(\"f1\"), Operator::Gt, lit(1.88)));\n+ assert_eq!(rewritten, lit(true));\n+ assert_eq!(\n+ rewriter.value_exprs,\n+ &mut vec![BinaryExpr {\n+ left: Column {\n+ relation: None,\n+ name: VALUE_COLUMN_NAME.into(),\n+ },\n+ op: Operator::Gt,\n+ right: lit(1.88),\n+ }]\n+ );\n }\n \n #[test]\n"]
| 1 |
["0cb9751b0a1bdd8d2c88b45d4366e760d6b1bbed"]
|
["refactor"]
|
cancel in-progress dep update jobs when a new one arrives [skip ci]
|
["diff --git a/.github/workflows/update-deps.yml b/.github/workflows/update-deps.yml\nindex 3a71e29..25f6f27 100644\n--- a/.github/workflows/update-deps.yml\n+++ b/.github/workflows/update-deps.yml\n@@ -4,6 +4,11 @@ on:\n # run every 24 hours at midnight\n - cron: \"0 */24 * * *\"\n workflow_dispatch:\n+\n+concurrency:\n+ group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}\n+ cancel-in-progress: true\n+\n jobs:\n generate_updates:\n runs-on: ubuntu-latest\n"]
| 1 |
["c2300c94c6b7d1599387272b616e1d79e93723c7"]
|
["cicd"]
|
make jq use compact json for rebase branch query
|
["diff --git a/.github/workflows/ibis-rebase-nightly.yml b/.github/workflows/ibis-rebase-nightly.yml\nindex 0e284b0..4a3ec7a 100644\n--- a/.github/workflows/ibis-rebase-nightly.yml\n+++ b/.github/workflows/ibis-rebase-nightly.yml\n@@ -22,7 +22,7 @@ jobs:\n | cut -d ' ' -f2 \\\n | grep -P '\\d+\\.x\\.x' \\\n | xargs printf '\"%s\"' \\\n- | jq -s '{branch: .}')\n+ | jq -rcMs '{branch: .}')\n \n echo \"::set-output name=matrix::$branches\"\n \n"]
| 1 |
["4638dcdf7011e8e42d11fde04f068f22ee20fa1d"]
|
["cicd"]
|
replace api call which requires auth token in public page
re #4694
Signed-off-by: Pranav C <pranavxc@gmail.com>
|
["diff --git a/packages/nc-gui/composables/useSharedView.ts b/packages/nc-gui/composables/useSharedView.ts\nindex cb0c5ea..f67a6c9 100644\n--- a/packages/nc-gui/composables/useSharedView.ts\n+++ b/packages/nc-gui/composables/useSharedView.ts\n@@ -17,7 +17,7 @@ export function useSharedView() {\n \n const { appInfo } = $(useGlobal())\n \n- const { loadProject } = useProject()\n+ const { project } = useProject()\n \n const appInfoDefaultLimit = appInfo.defaultLimit || 25\n \n@@ -76,7 +76,16 @@ export function useSharedView() {\n \n await setMeta(viewMeta.model)\n \n- await loadProject(true, viewMeta.project_id)\n+ // if project is not defined then set it with an object containing base\n+ if (!project.value?.bases)\n+ project.value = {\n+ bases: [\n+ {\n+ id: viewMeta.base_id,\n+ type: viewMeta.client,\n+ },\n+ ],\n+ }\n \n const relatedMetas = { ...viewMeta.relatedMetas }\n Object.keys(relatedMetas).forEach((key) => setMeta(relatedMetas[key]))\n"]
| 1 |
["4986a5892fb00bd5a6b2065ad8cfefbc36052dd7"]
|
["fix"]
|
reset padding first on PadLog
Signed-off-by: Carlos A Becker <caarlos0@users.noreply.github.com>
|
["diff --git a/internal/middleware/logging/logging.go b/internal/middleware/logging/logging.go\nindex 1a3adc7..92c8eb5 100644\n--- a/internal/middleware/logging/logging.go\n+++ b/internal/middleware/logging/logging.go\n@@ -35,6 +35,7 @@ func Log(title string, next middleware.Action) middleware.Action {\n func PadLog(title string, next middleware.Action) middleware.Action {\n \treturn func(ctx *context.Context) error {\n \t\tdefer log.ResetPadding()\n+\t\tlog.ResetPadding()\n \t\tlog.IncreasePadding()\n \t\tlog.Infof(bold.Render(title))\n \t\tlog.IncreasePadding()\n"]
| 1 |
["c5904a9004fca1e438168ca7334a0deefab536ff"]
|
["fix"]
|
add getting started gitlab ci configuration
Signed-off-by: Adrien Brault <adrien.brault@gmail.com>
|
["diff --git a/docs/getting-started/1201-ci-environment.md b/docs/getting-started/1201-ci-environment.md\nindex 6c72b15..2313e30 100644\n--- a/docs/getting-started/1201-ci-environment.md\n+++ b/docs/getting-started/1201-ci-environment.md\n@@ -46,7 +46,60 @@ If you would like us to document CircleCI next, vote for it here: [dagger#1677](\n \n <TabItem value=\"gitlab\">\n \n-If you would like us to document GitLab next, vote for it here: [dagger#1677](https://github.com/dagger/dagger/discussions/1677)\n+```yaml\n+.docker:\n+ image: docker:${DOCKER_VERSION}-git\n+ services:\n+ - docker:${DOCKER_VERSION}-dind\n+ variables:\n+ # See https://docs.gitlab.com/ee/ci/docker/using_docker_build.html#docker-in-docker-with-tls-enabled-in-the-docker-executor\n+ DOCKER_HOST: tcp://docker:2376\n+\n+ DOCKER_TLS_VERIFY: '1'\n+ DOCKER_TLS_CERTDIR: '/certs'\n+ DOCKER_CERT_PATH: '/certs/client'\n+\n+ # Faster than the default, apparently\n+ DOCKER_DRIVER: overlay2\n+\n+ DOCKER_VERSION: '20.10'\n+\n+.dagger:\n+ extends: [.docker]\n+ variables:\n+ DAGGER_VERSION: 0.2.4\n+ DAGGER_LOG_FORMAT: plain\n+ DAGGER_CACHE_PATH: .dagger-cache\n+\n+ ARGS: ''\n+ cache:\n+ key: dagger-${CI_JOB_NAME}\n+ paths:\n+ - ${DAGGER_CACHE_PATH}\n+ before_script:\n+ - apk add --no-cache curl\n+ - |\n+ # install dagger\n+ cd /usr/local\n+ curl -L https://dl.dagger.io/dagger/install.sh | sh\n+ cd -\n+\n+ dagger version\n+ script:\n+ - dagger project update\n+ - |\n+ dagger \\\n+ do \\\n+ --cache-from type=local,src=${DAGGER_CACHE_PATH} \\\n+ --cache-to type=local,mode=max,dest=${DAGGER_CACHE_PATH} \\\n+ ${ARGS}\n+\n+build:\n+ extends: [.dagger]\n+ variables:\n+ ARGS: build\n+\n+```\n \n </TabItem>\n \n"]
| 1 |
["12257ce53f94dc902df4ba087de90f52d2840ad4"]
|
["docs"]
|
Support ISNULL
|
["diff --git a/rust/cubesql/src/compile/engine/udf.rs b/rust/cubesql/src/compile/engine/udf.rs\nindex f3991c0..549167a 100644\n--- a/rust/cubesql/src/compile/engine/udf.rs\n+++ b/rust/cubesql/src/compile/engine/udf.rs\n@@ -3,13 +3,16 @@ use std::sync::Arc;\n \n use datafusion::{\n arrow::{\n- array::{ArrayRef, GenericStringArray, Int32Builder, StringBuilder, UInt32Builder},\n+ array::{\n+ ArrayRef, BooleanBuilder, GenericStringArray, Int32Builder, StringBuilder,\n+ UInt32Builder,\n+ },\n datatypes::DataType,\n },\n error::DataFusionError,\n logical_plan::create_udf,\n physical_plan::{\n- functions::{make_scalar_function, Volatility},\n+ functions::{make_scalar_function, ReturnTypeFunction, Signature, Volatility},\n udf::ScalarUDF,\n },\n };\n@@ -167,3 +170,24 @@ pub fn create_instr_udf() -> ScalarUDF {\n fun,\n )\n }\n+\n+pub fn create_isnull_udf() -> ScalarUDF {\n+ let fun = make_scalar_function(move |args: &[ArrayRef]| {\n+ assert!(args.len() == 1);\n+\n+ let mut builder = BooleanBuilder::new(1);\n+ builder.append_value(args[0].is_null(0))?;\n+\n+ Ok(Arc::new(builder.finish()) as ArrayRef)\n+ });\n+\n+ let return_type: ReturnTypeFunction =\n+ Arc::new(move |_| Ok(Arc::new(DataType::Boolean).clone()));\n+\n+ ScalarUDF::new(\n+ \"isnull\",\n+ &Signature::any(1, Volatility::Immutable),\n+ &return_type,\n+ &fun,\n+ )\n+}\ndiff --git a/rust/cubesql/src/compile/mod.rs b/rust/cubesql/src/compile/mod.rs\nindex 891283b..9004ffe 100644\n--- a/rust/cubesql/src/compile/mod.rs\n+++ b/rust/cubesql/src/compile/mod.rs\n@@ -36,7 +36,7 @@ use self::context::*;\n use self::engine::context::SystemVar;\n use self::engine::udf::{\n create_connection_id_udf, create_current_user_udf, create_db_udf, create_instr_udf,\n- create_user_udf, create_version_udf,\n+ create_isnull_udf, create_user_udf, create_version_udf,\n };\n use self::parser::parse_sql_to_statement;\n \n@@ -1415,6 +1415,7 @@ impl QueryPlanner {\n ctx.register_udf(create_user_udf(props));\n ctx.register_udf(create_current_user_udf(props));\n ctx.register_udf(create_instr_udf());\n+ ctx.register_udf(create_isnull_udf());\n \n {\n let schema_provider = MemorySchemaProvider::new();\n"]
| 1 |
["f0a4b62f4bd2a1ba2caf37c764b117b352a2f2b3"]
|
["feat"]
|
common routine for browser timezone
Signed-off-by: Raju Udava <86527202+dstala@users.noreply.github.com>
|
["diff --git a/tests/playwright/tests/db/timezone.spec.ts b/tests/playwright/tests/db/timezone.spec.ts\nindex c966c2b..a30c7e4 100644\n--- a/tests/playwright/tests/db/timezone.spec.ts\n+++ b/tests/playwright/tests/db/timezone.spec.ts\n@@ -6,6 +6,7 @@ import { Api, UITypes } from 'nocodb-sdk';\n import { ProjectsPage } from '../../pages/ProjectsPage';\n import { isMysql, isPg, isSqlite } from '../../setup/db';\n import { getKnexConfig } from '../utils/config';\n+import { getBrowserTimezoneOffset } from '../utils/general';\n let api: Api<any>, records: any[];\n \n const columns = [\n@@ -680,11 +681,7 @@ test.describe.serial('External DB - DateTime column', async () => {\n await dashboard.rootPage.waitForTimeout(2000);\n \n // get timezone offset\n- const timezoneOffset = new Date().getTimezoneOffset();\n- const hours = Math.floor(Math.abs(timezoneOffset) / 60);\n- const minutes = Math.abs(timezoneOffset % 60);\n- const sign = timezoneOffset <= 0 ? '+' : '-';\n- const formattedOffset = `${sign}${hours.toString().padStart(2, '0')}:${minutes.toString().padStart(2, '0')}`;\n+ const formattedOffset = getBrowserTimezoneOffset();\n \n await dashboard.treeView.openBase({ title: 'datetimetable' });\n await dashboard.treeView.openTable({ title: 'MyTable' });\n@@ -844,11 +841,7 @@ test.describe('Ext DB MySQL : DB Timezone configured as HKT', () => {\n }\n \n // get timezone offset\n- const timezoneOffset = new Date().getTimezoneOffset();\n- const hours = Math.floor(Math.abs(timezoneOffset) / 60);\n- const minutes = Math.abs(timezoneOffset % 60);\n- const sign = timezoneOffset <= 0 ? '+' : '-';\n- const formattedOffset = `${sign}${hours.toString().padStart(2, '0')}:${minutes.toString().padStart(2, '0')}`;\n+ const formattedOffset = getBrowserTimezoneOffset();\n \n // connect after timezone is set\n await connectToExtDb(context);\ndiff --git a/tests/playwright/tests/utils/general.ts b/tests/playwright/tests/utils/general.ts\nindex 56a9e1a..45e9c6c 100644\n--- a/tests/playwright/tests/utils/general.ts\n+++ b/tests/playwright/tests/utils/general.ts\n@@ -50,4 +50,14 @@ function getDefaultPwd() {\n return 'Password123.';\n }\n \n-export { getTextExcludeIconText, isSubset, getIconText, getDefaultPwd };\n+function getBrowserTimezoneOffset() {\n+ // get timezone offset\n+ const timezoneOffset = new Date().getTimezoneOffset();\n+ const hours = Math.floor(Math.abs(timezoneOffset) / 60);\n+ const minutes = Math.abs(timezoneOffset % 60);\n+ const sign = timezoneOffset <= 0 ? '+' : '-';\n+ const formattedOffset = `${sign}${hours.toString().padStart(2, '0')}:${minutes.toString().padStart(2, '0')}`;\n+ return formattedOffset;\n+}\n+\n+export { getTextExcludeIconText, isSubset, getIconText, getDefaultPwd, getBrowserTimezoneOffset };\n"]
| 1 |
["7d3e9b3a98b02f6cb1f3444dc7e3a0459aeb26a7"]
|
["test"]
|
Fix readme
Signed-off-by: Ben Johnson <bjohnson@binarylogic.com>
|
["diff --git a/README.md b/README.md\nindex 587d655..da746bb 100644\n--- a/README.md\n+++ b/README.md\n@@ -38,15 +38,20 @@ simple and unified.\n * [**Installation**][docs.installation] - [containers][docs.containers], [operating systems][docs.operating_systems], [package managers][docs.package_managers], [from archives][docs.from-archives], [from source][docs.from-source]\n * [**Configuration**][docs.configuration]\n * [**Deployment**][docs.deployment] - [topologies][docs.topologies], [roles][docs.roles]\n+* [**Guides**][docs.guides] - [getting started][docs.guides.getting_started]\n \n-#### [Components](https://vector.dev/components)\n+#### Reference\n \n-* [**Sources**][docs.sources] - \n-* [**Transforms**][docs.transforms]\n-* [**Sinks**][docs.sinks]\n+* [**Sources**][docs.sources] - [docker][docs.sources.docker], [file][docs.sources.file], [journald][docs.sources.journald], [kafka][docs.sources.kafka]\n+* [**Transforms**][docs.transforms] - [json_parser][docs.transforms.json_parser], [log_to_metric][docs.transforms.log_to_metric], [lua][docs.transforms.lua], [regex_parser][docs.transforms.regex_parser]\n+* [**Sinks**][docs.sinks] - [aws_cloudwatch_logs][docs.sinks.aws_cloudwatch_logs], [aws_cloudwatch_metrics][docs.sinks.aws_cloudwatch_metrics], [aws_kinesis_streams][docs.sinks.aws_kinesis_streams], [aws_s3][docs.sinks.aws_s3], [clickhouse][docs.sinks.clickhouse], [elasticsearch][docs.sinks.elasticsearch], and [15 more][docs.sinks]\n \n-* [**Administration**][docs.administration] - [process management][docs.process-management], [monitoring][docs.monitoring], [updating][docs.updating], [validating][docs.validating]\n-* [**Guides**][docs.guides]\n+#### Administration\n+\n+* [**Process management**][docs.process-management]\n+* [**Monitoring**][docs.monitoring]\n+* [**Updating**][docs.updating]\n+* [**Validating**][docs.validating]\n \n #### Resources\n \n@@ -105,88 +110,6 @@ curl --proto '=https' --tlsv1.2 -sSf https://sh.vector.dev | sh\n \n Or view [platform specific installation instructions][docs.installation].\n \n-\n-## Sources\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`docker`**][docs.sources.docker] | Ingests data through the docker engine daemon and outputs [`log`][docs.data-model#log] events. |\n-| [**`file`**][docs.sources.file] | Ingests data through one or more local files and outputs [`log`][docs.data-model#log] events. |\n-| [**`journald`**][docs.sources.journald] | Ingests data through log records from journald and outputs [`log`][docs.data-model#log] events. |\n-| [**`kafka`**][docs.sources.kafka] | Ingests data through Kafka 0.9 or later and outputs [`log`][docs.data-model#log] events. |\n-| [**`statsd`**][docs.sources.statsd] | Ingests data through the StatsD UDP protocol and outputs [`metric`][docs.data-model#metric] events. |\n-| [**`stdin`**][docs.sources.stdin] | Ingests data through standard input (STDIN) and outputs [`log`][docs.data-model#log] events. |\n-| [**`syslog`**][docs.sources.syslog] | Ingests data through the Syslog 5424 protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`tcp`**][docs.sources.tcp] | Ingests data through the TCP protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`udp`**][docs.sources.udp] | Ingests data through the UDP protocol and outputs [`log`][docs.data-model#log] events. |\n-| [**`vector`**][docs.sources.vector] | Ingests data through another upstream [`vector` sink][docs.sinks.vector] and outputs [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events. 
|\n-\n-[+ request a new source][urls.new_source]\n-\n-\n-## Transforms\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`add_fields`**][docs.transforms.add_fields] | Accepts [`log`][docs.data-model#log] events and allows you to add one or more log fields. |\n-| [**`add_tags`**][docs.transforms.add_tags] | Accepts [`metric`][docs.data-model#metric] events and allows you to add one or more metric tags. |\n-| [**`coercer`**][docs.transforms.coercer] | Accepts [`log`][docs.data-model#log] events and allows you to coerce log fields into fixed types. |\n-| [**`field_filter`**][docs.transforms.field_filter] | Accepts [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events and allows you to filter events by a log field's value. |\n-| [**`grok_parser`**][docs.transforms.grok_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field value with [Grok][urls.grok]. |\n-| [**`json_parser`**][docs.transforms.json_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field value as JSON. |\n-| [**`log_to_metric`**][docs.transforms.log_to_metric] | Accepts [`log`][docs.data-model#log] events and allows you to convert logs into one or more metrics. |\n-| [**`lua`**][docs.transforms.lua] | Accepts [`log`][docs.data-model#log] events and allows you to transform events with a full embedded [Lua][urls.lua] engine. |\n-| [**`regex_parser`**][docs.transforms.regex_parser] | Accepts [`log`][docs.data-model#log] events and allows you to parse a log field's value with a [Regular Expression][urls.regex]. |\n-| [**`remove_fields`**][docs.transforms.remove_fields] | Accepts [`log`][docs.data-model#log] events and allows you to remove one or more log fields. |\n-| [**`remove_tags`**][docs.transforms.remove_tags] | Accepts [`metric`][docs.data-model#metric] events and allows you to remove one or more metric tags. |\n-| [**`sampler`**][docs.transforms.sampler] | Accepts [`log`][docs.data-model#log] events and allows you to sample events with a configurable rate. |\n-| [**`split`**][docs.transforms.split] | Accepts [`log`][docs.data-model#log] events and allows you to split a field's value on a given separator and zip the tokens into ordered field names. |\n-| [**`tokenizer`**][docs.transforms.tokenizer] | Accepts [`log`][docs.data-model#log] events and allows you to tokenize a field's value by splitting on white space, ignoring special wrapping characters, and zip the tokens into ordered field names. |\n-\n-[+ request a new transform][urls.new_transform]\n-\n-\n-## Sinks\n-\n-| Name | Description |\n-|:------|:------------|\n-| [**`aws_cloudwatch_logs`**][docs.sinks.aws_cloudwatch_logs] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS CloudWatch Logs][urls.aws_cw_logs] via the [`PutLogEvents` API endpoint](https://docs.aws.amazon.com/AmazonCloudWatchLogs/latest/APIReference/API_PutLogEvents.html). |\n-| [**`aws_cloudwatch_metrics`**][docs.sinks.aws_cloudwatch_metrics] | [Streams](#streaming) [`metric`][docs.data-model#metric] events to [AWS CloudWatch Metrics][urls.aws_cw_metrics] via the [`PutMetricData` API endpoint](https://docs.aws.amazon.com/AmazonCloudWatch/latest/APIReference/API_PutMetricData.html). 
|\n-| [**`aws_kinesis_streams`**][docs.sinks.aws_kinesis_streams] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS Kinesis Data Stream][urls.aws_kinesis_data_streams] via the [`PutRecords` API endpoint](https://docs.aws.amazon.com/kinesis/latest/APIReference/API_PutRecords.html). |\n-| [**`aws_s3`**][docs.sinks.aws_s3] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [AWS S3][urls.aws_s3] via the [`PutObject` API endpoint](https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectPUT.html). |\n-| [**`blackhole`**][docs.sinks.blackhole] | [Streams](#streaming) [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events to a blackhole that simply discards data, designed for testing and benchmarking purposes. |\n-| [**`clickhouse`**][docs.sinks.clickhouse] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [Clickhouse][urls.clickhouse] via the [`HTTP` Interface][urls.clickhouse_http]. |\n-| [**`console`**][docs.sinks.console] | [Streams](#streaming) [`log`][docs.data-model#log] and [`metric`][docs.data-model#metric] events to [standard output streams][urls.standard_streams], such as `STDOUT` and `STDERR`. |\n-| [**`datadog_metrics`**][docs.sinks.datadog_metrics] | [Batches](#buffers-and-batches) [`metric`][docs.data-model#metric] events to [Datadog][urls.datadog] metrics service using [HTTP API](https://docs.datadoghq.com/api/?lang=bash#metrics). |\n-| [**`elasticsearch`**][docs.sinks.elasticsearch] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to [Elasticsearch][urls.elasticsearch] via the [`_bulk` API endpoint](https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html). |\n-| [**`file`**][docs.sinks.file] | [Streams](#streaming) [`log`][docs.data-model#log] events to a file. |\n-| [**`http`**][docs.sinks.http] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to a generic HTTP endpoint. |\n-| [**`kafka`**][docs.sinks.kafka] | [Streams](#streaming) [`log`][docs.data-model#log] events to [Apache Kafka][urls.kafka] via the [Kafka protocol][urls.kafka_protocol]. |\n-| [**`prometheus`**][docs.sinks.prometheus] | [Exposes](#exposing-and-scraping) [`metric`][docs.data-model#metric] events to [Prometheus][urls.prometheus] metrics service. |\n-| [**`splunk_hec`**][docs.sinks.splunk_hec] | [Batches](#buffers-and-batches) [`log`][docs.data-model#log] events to a [Splunk HTTP Event Collector][urls.splunk_hec]. |\n-| [**`statsd`**][docs.sinks.statsd] | [Streams](#streaming) [`metric`][docs.data-model#metric] events to [StatsD][urls.statsd] metrics service. |\n-| [**`tcp`**][docs.sinks.tcp] | [Streams](#streaming) [`log`][docs.data-model#log] events to a TCP connection. |\n-| [**`vector`**][docs.sinks.vector] | [Streams](#streaming) [`log`][docs.data-model#log] events to another downstream [`vector` source][docs.sources.vector]. |\n-\n-[+ request a new sink][urls.new_sink]\n-\n-\n-## License\n-\n-Copyright 2019, Vector Authors. All rights reserved.\n-\n-Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n-use these files except in compliance with the License. You may obtain a copy\n-of the License at\n-\n-http://www.apache.org/licenses/LICENSE-2.0\n-\n-Unless required by applicable law or agreed to in writing, software\n-distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n-WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n-License for the specific language governing permissions and limitations under\n-the License.\n-\n ---\n \n <p align=\"center\">\n@@ -200,8 +123,6 @@ the License.\n [docs.configuration]: https://vector.dev/docs/setup/configuration\n [docs.containers]: https://vector.dev/docs/setup/installation/containers\n [docs.correctness]: https://vector.dev/docs/about/correctness\n-[docs.data-model#log]: https://vector.dev/docs/about/data-model#log\n-[docs.data-model#metric]: https://vector.dev/docs/about/data-model#metric\n [docs.data-model.log]: https://vector.dev/docs/about/data-model/log\n [docs.data-model.metric]: https://vector.dev/docs/about/data-model/metric\n [docs.data_model]: https://vector.dev/docs/about/data-model\n@@ -209,6 +130,7 @@ the License.\n [docs.from-archives]: https://vector.dev/docs/setup/installation/manual/from-archives\n [docs.from-source]: https://vector.dev/docs/setup/installation/manual/from-source\n [docs.guarantees]: https://vector.dev/docs/about/guarantees\n+[docs.guides.getting_started]: https://vector.dev/docs/setup/guides/getting-started\n [docs.guides]: https://vector.dev/docs/setup/guides\n [docs.installation]: https://vector.dev/docs/setup/installation\n [docs.monitoring]: https://vector.dev/docs/administration/monitoring\n@@ -224,72 +146,25 @@ the License.\n [docs.sinks.aws_cloudwatch_metrics]: https://vector.dev/docs/reference/sinks/aws_cloudwatch_metrics\n [docs.sinks.aws_kinesis_streams]: https://vector.dev/docs/reference/sinks/aws_kinesis_streams\n [docs.sinks.aws_s3]: https://vector.dev/docs/reference/sinks/aws_s3\n-[docs.sinks.blackhole]: https://vector.dev/docs/reference/sinks/blackhole\n [docs.sinks.clickhouse]: https://vector.dev/docs/reference/sinks/clickhouse\n-[docs.sinks.console]: https://vector.dev/docs/reference/sinks/console\n-[docs.sinks.datadog_metrics]: https://vector.dev/docs/reference/sinks/datadog_metrics\n [docs.sinks.elasticsearch]: https://vector.dev/docs/reference/sinks/elasticsearch\n-[docs.sinks.file]: https://vector.dev/docs/reference/sinks/file\n-[docs.sinks.http]: https://vector.dev/docs/reference/sinks/http\n-[docs.sinks.kafka]: https://vector.dev/docs/reference/sinks/kafka\n-[docs.sinks.prometheus]: https://vector.dev/docs/reference/sinks/prometheus\n-[docs.sinks.splunk_hec]: https://vector.dev/docs/reference/sinks/splunk_hec\n-[docs.sinks.statsd]: https://vector.dev/docs/reference/sinks/statsd\n-[docs.sinks.tcp]: https://vector.dev/docs/reference/sinks/tcp\n-[docs.sinks.vector]: https://vector.dev/docs/reference/sinks/vector\n [docs.sinks]: https://vector.dev/docs/reference/sinks\n [docs.sources.docker]: https://vector.dev/docs/reference/sources/docker\n [docs.sources.file]: https://vector.dev/docs/reference/sources/file\n [docs.sources.journald]: https://vector.dev/docs/reference/sources/journald\n [docs.sources.kafka]: https://vector.dev/docs/reference/sources/kafka\n-[docs.sources.statsd]: https://vector.dev/docs/reference/sources/statsd\n-[docs.sources.stdin]: https://vector.dev/docs/reference/sources/stdin\n-[docs.sources.syslog]: https://vector.dev/docs/reference/sources/syslog\n-[docs.sources.tcp]: https://vector.dev/docs/reference/sources/tcp\n-[docs.sources.udp]: https://vector.dev/docs/reference/sources/udp\n-[docs.sources.vector]: https://vector.dev/docs/reference/sources/vector\n [docs.sources]: https://vector.dev/docs/reference/sources\n [docs.topologies]: https://vector.dev/docs/setup/deployment/topologies\n-[docs.transforms.add_fields]: 
https://vector.dev/docs/reference/transforms/add_fields\n-[docs.transforms.add_tags]: https://vector.dev/docs/reference/transforms/add_tags\n-[docs.transforms.coercer]: https://vector.dev/docs/reference/transforms/coercer\n-[docs.transforms.field_filter]: https://vector.dev/docs/reference/transforms/field_filter\n-[docs.transforms.grok_parser]: https://vector.dev/docs/reference/transforms/grok_parser\n [docs.transforms.json_parser]: https://vector.dev/docs/reference/transforms/json_parser\n [docs.transforms.log_to_metric]: https://vector.dev/docs/reference/transforms/log_to_metric\n [docs.transforms.lua]: https://vector.dev/docs/reference/transforms/lua\n [docs.transforms.regex_parser]: https://vector.dev/docs/reference/transforms/regex_parser\n-[docs.transforms.remove_fields]: https://vector.dev/docs/reference/transforms/remove_fields\n-[docs.transforms.remove_tags]: https://vector.dev/docs/reference/transforms/remove_tags\n-[docs.transforms.sampler]: https://vector.dev/docs/reference/transforms/sampler\n-[docs.transforms.split]: https://vector.dev/docs/reference/transforms/split\n-[docs.transforms.tokenizer]: https://vector.dev/docs/reference/transforms/tokenizer\n [docs.transforms]: https://vector.dev/docs/reference/transforms\n [docs.updating]: https://vector.dev/docs/administration/updating\n [docs.use_cases]: https://vector.dev/docs/use_cases\n [docs.validating]: https://vector.dev/docs/administration/validating\n-[urls.aws_cw_logs]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/WhatIsCloudWatchLogs.html\n-[urls.aws_cw_metrics]: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/working_with_metrics.html\n-[urls.aws_kinesis_data_streams]: https://aws.amazon.com/kinesis/data-streams/\n-[urls.aws_s3]: https://aws.amazon.com/s3/\n-[urls.clickhouse]: https://clickhouse.yandex/\n-[urls.clickhouse_http]: https://clickhouse.yandex/docs/en/interfaces/http/\n-[urls.datadog]: https://www.datadoghq.com\n-[urls.elasticsearch]: https://www.elastic.co/products/elasticsearch\n-[urls.grok]: http://grokdebug.herokuapp.com/\n-[urls.kafka]: https://kafka.apache.org/\n-[urls.kafka_protocol]: https://kafka.apache.org/protocol\n-[urls.lua]: https://www.lua.org/\n [urls.mailing_list]: https://vector.dev/mailing_list/\n-[urls.new_sink]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.new_source]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.new_transform]: https://github.com/timberio/vector/issues/new?labels=Type%3A+New+Feature\n-[urls.prometheus]: https://prometheus.io/\n-[urls.regex]: https://en.wikipedia.org/wiki/Regular_expression\n [urls.rust]: https://www.rust-lang.org/\n-[urls.splunk_hec]: http://dev.splunk.com/view/event-collector/SP-CAAAE6M\n-[urls.standard_streams]: https://en.wikipedia.org/wiki/Standard_streams\n-[urls.statsd]: https://github.com/statsd/statsd\n [urls.test_harness]: https://github.com/timberio/vector-test-harness/\n [urls.v0.5.0]: https://github.com/timberio/vector/releases/tag/v0.5.0\n [urls.vector_changelog]: https://github.com/timberio/vector/blob/master/CHANGELOG.md\ndiff --git a/README.md.erb b/README.md.erb\nindex 3b14aa0..cc241eb 100644\n--- a/README.md.erb\n+++ b/README.md.erb\n@@ -38,15 +38,20 @@ simple and unified.\n * [**Installation**][docs.installation] - [containers][docs.containers], [operating systems][docs.operating_systems], [package managers][docs.package_managers], [from archives][docs.from-archives], [from source][docs.from-source]\n * 
[**Configuration**][docs.configuration]\n * [**Deployment**][docs.deployment] - [topologies][docs.topologies], [roles][docs.roles]\n+* [**Guides**][docs.guides] - [getting started][docs.guides.getting_started]\n \n-#### [Components](https://vector.dev/components)\n+#### Reference\n \n-* [**Sources**][docs.sources] - \n-* [**Transforms**][docs.transforms]\n-* [**Sinks**][docs.sinks]\n+* [**Sources**][docs.sources] - <%= common_component_links(:source) %>\n+* [**Transforms**][docs.transforms] - <%= common_component_links(:transform) %>\n+* [**Sinks**][docs.sinks] - <%= common_component_links(:sink) %>\n \n-* [**Administration**][docs.administration] - [process management][docs.process-management], [monitoring][docs.monitoring], [updating][docs.updating], [validating][docs.validating]\n-* [**Guides**][docs.guides]\n+#### Administration\n+\n+* [**Process management**][docs.process-management]\n+* [**Monitoring**][docs.monitoring]\n+* [**Updating**][docs.updating]\n+* [**Validating**][docs.validating]\n \n #### Resources\n \n@@ -105,44 +110,6 @@ Run the following in your terminal, then follow the on-screen instructions.\n \n Or view [platform specific installation instructions][docs.installation].\n \n-\n-## Sources\n-\n-<%= components_table(metadata.sources.to_h.values.sort) %>\n-\n-[+ request a new source][urls.new_source]\n-\n-\n-## Transforms\n-\n-<%= components_table(metadata.transforms.to_h.values.sort) %>\n-\n-[+ request a new transform][urls.new_transform]\n-\n-\n-## Sinks\n-\n-<%= components_table(metadata.sinks.to_h.values.sort) %>\n-\n-[+ request a new sink][urls.new_sink]\n-\n-\n-## License\n-\n-Copyright <%= Time.now.year %>, Vector Authors. All rights reserved.\n-\n-Licensed under the Apache License, Version 2.0 (the \"License\"); you may not\n-use these files except in compliance with the License. You may obtain a copy\n-of the License at\n-\n-http://www.apache.org/licenses/LICENSE-2.0\n-\n-Unless required by applicable law or agreed to in writing, software\n-distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n-WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the\n-License for the specific language governing permissions and limitations under\n-the License.\n-\n ---\n \n <p align=\"center\">\ndiff --git a/scripts/generate/templates.rb b/scripts/generate/templates.rb\nindex e5e7ce7..c793ae0 100644\n--- a/scripts/generate/templates.rb\n+++ b/scripts/generate/templates.rb\n@@ -89,6 +89,23 @@ class Templates\n render(\"#{partials_path}/_commit_type_toc_item.md\", binding).gsub(/,$/, \"\")\n end\n \n+ def common_component_links(type, limit = 5)\n+ common = metadata.send(\"#{type.to_s.pluralize}_list\").select(&:common?)\n+\n+ links =\n+ common[0..limit].collect do |component|\n+ \"[#{component.name}][docs.#{type.to_s.pluralize}.#{component.name}]\"\n+ end\n+\n+ num_leftover = common.size - links.size\n+\n+ if num_leftover > 0\n+ links << \"and [15 more][docs.#{type.to_s.pluralize}]\"\n+ end\n+\n+ links.join(\", \")\n+ end\n+\n def component_config_example(component)\n render(\"#{partials_path}/_component_config_example.md\", binding).strip\n end\ndiff --git a/scripts/util/metadata/component.rb b/scripts/util/metadata/component.rb\nindex 0873b2e..4dc5650 100644\n--- a/scripts/util/metadata/component.rb\n+++ b/scripts/util/metadata/component.rb\n@@ -9,6 +9,7 @@ class Component\n include Comparable\n \n attr_reader :beta,\n+ :common,\n :function_category,\n :id,\n :name,\n@@ -18,6 +19,7 @@ class Component\n \n def initialize(hash)\n @beta = hash[\"beta\"] == true\n+ @common = hash[\"common\"] == true\n @function_category = hash.fetch(\"function_category\")\n @name = hash.fetch(\"name\")\n @type ||= self.class.name.downcase\n@@ -71,6 +73,10 @@ class Component\n beta == true\n end\n \n+ def common?\n+ common == true\n+ end\n+\n def context_options\n options_list.select(&:context?)\n end\ndiff --git a/website/src/components/VectorComponents/index.js b/website/src/components/VectorComponents/index.js\nindex b6c5c13..d3c9adf 100644\n--- a/website/src/components/VectorComponents/index.js\n+++ b/website/src/components/VectorComponents/index.js\n@@ -154,7 +154,7 @@ function VectorComponents(props) {\n //\n \n const [onlyAtLeastOnce, setOnlyAtLeastOnce] = useState(queryObj['at-least-once'] == 'true');\n- const [onlyFunctions, setOnlyFunctions] = useState(new Set(queryObj['providers']));\n+ const [onlyFunctions, setOnlyFunctions] = useState(new Set(queryObj['functions']));\n const [onlyLog, setOnlyLog] = useState(queryObj['log'] == 'true');\n const [onlyMetric, setOnlyMetric] = useState(queryObj['metric'] == 'true');\n const [onlyProductionReady, setOnlyProductionReady] = useState(queryObj['prod-ready'] == 'true');\n"]
| 1 |
["662c5d1346ea2b01c0bc3c11c648cbdf92035fe2"]
|
["docs"]
|
get ip from forwarded header
|
["diff --git a/kousa/lib/broth/socket_handler.ex b/kousa/lib/broth/socket_handler.ex\nindex d142135..5828f30 100644\n--- a/kousa/lib/broth/socket_handler.ex\n+++ b/kousa/lib/broth/socket_handler.ex\n@@ -22,7 +22,7 @@ defmodule Broth.SocketHandler do\n ## initialization boilerplate\n \n @impl true\n- def init(request = %{peer: {ip, _reverse_port}}, _state) do\n+ def init(request, _state) do\n props = :cowboy_req.parse_qs(request)\n \n compression =\n@@ -37,10 +37,16 @@ defmodule Broth.SocketHandler do\n _ -> :json\n end\n \n+ ip =\n+ case request.headers do\n+ %{\"x-forwarded-for\" => v} -> v\n+ _ -> nil\n+ end\n+\n state = %__MODULE__{\n awaiting_init: true,\n user_id: nil,\n- ip: IP.to_string(ip),\n+ ip: ip,\n encoding: encoding,\n compression: compression,\n callers: get_callers(request)\ndiff --git a/kousa/test/_support/ws_client.ex b/kousa/test/_support/ws_client.ex\nindex aeca704..125da17 100644\n--- a/kousa/test/_support/ws_client.ex\n+++ b/kousa/test/_support/ws_client.ex\n@@ -19,7 +19,9 @@ defmodule BrothTest.WsClient do\n \n @api_url\n |> Path.join(\"socket\")\n- |> WebSockex.start_link(__MODULE__, nil, extra_headers: [{\"user-agent\", ancestors}])\n+ |> WebSockex.start_link(__MODULE__, nil,\n+ extra_headers: [{\"user-agent\", ancestors}, {\"x-forwarded-for\", \"127.0.0.1\"}]\n+ )\n end\n \n ###########################################################################\n"]
| 1 |
["2f5718743a830d40ddf272ad46f253dbb6d08cff"]
|
["fix"]
|
set first-attempt to 5s and subsequent-attempt to 180s by default
|
["diff --git a/testnet/stacks-node/src/config.rs b/testnet/stacks-node/src/config.rs\nindex 24ca06c..d80f721 100644\n--- a/testnet/stacks-node/src/config.rs\n+++ b/testnet/stacks-node/src/config.rs\n@@ -1414,8 +1414,8 @@ impl MinerConfig {\n pub fn default() -> MinerConfig {\n MinerConfig {\n min_tx_fee: 1,\n- first_attempt_time_ms: 1_000,\n- subsequent_attempt_time_ms: 30_000,\n+ first_attempt_time_ms: 5_000,\n+ subsequent_attempt_time_ms: 180_000,\n microblock_attempt_time_ms: 30_000,\n probability_pick_no_estimate_tx: 5,\n }\n"]
| 1 |
["d35d302cadf355a169dca6636597183de6bbee23"]
|
["fix"]
|
add jackson dependencies for zb-bpmn-model
|
["diff --git a/parent/pom.xml b/parent/pom.xml\nindex d475131..6290e66 100644\n--- a/parent/pom.xml\n+++ b/parent/pom.xml\n@@ -35,6 +35,7 @@\n <version.mockito>1.8.5</version.mockito>\n <version.assertj>3.8.0</version.assertj>\n <version.msgpack>0.8.13</version.msgpack>\n+ <version.jackson>2.9.0</version.jackson>\n <version.jmh>1.11.2</version.jmh>\n <version.sbe>1.5.6</version.sbe>\n <version.slf4j>1.7.23</version.slf4j>\n@@ -64,6 +65,18 @@\n </dependency>\n \n <dependency>\n+ <groupId>com.fasterxml.jackson.core</groupId>\n+ <artifactId>jackson-databind</artifactId>\n+ <version>${version.jackson}</version>\n+ </dependency>\n+\n+ <dependency>\n+ <groupId>com.fasterxml.jackson.dataformat</groupId>\n+ <artifactId>jackson-dataformat-yaml</artifactId>\n+ <version>${version.jackson}</version>\n+ </dependency>\n+\n+ <dependency>\n <groupId>org.msgpack</groupId>\n <artifactId>msgpack-core</artifactId>\n <version>${version.msgpack}</version>\n"]
| 1 |
["fab09655d5cc30727289cc3f26e5396fce235cd3"]
|
["build"]
|
fix test
Write another record so the commit position is updated and we can take a snapshot
|
["diff --git a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\nindex 24f1316..881c727 100644\n--- a/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n+++ b/qa/integration-tests/src/test/java/io/camunda/zeebe/it/clustering/ReaderCloseTest.java\n@@ -70,6 +70,14 @@ public class ReaderCloseTest {\n .getCluster()\n .getNodeId();\n clusteringRule.forceClusterToHaveNewLeader(followerId);\n+ // because of https://github.com/camunda-cloud/zeebe/issues/8329\n+ // we need to add another record so we can do a snapshot\n+ clientRule\n+ .getClient()\n+ .newPublishMessageCommand()\n+ .messageName(\"test\")\n+ .correlationKey(\"test\")\n+ .send();\n \n // when\n clusteringRule.triggerAndWaitForSnapshots();\n@@ -78,6 +86,7 @@ public class ReaderCloseTest {\n for (final Broker broker : clusteringRule.getBrokers()) {\n assertThatFilesOfDeletedSegmentsDoesNotExist(broker);\n }\n+ assertThat(leaderId).isNotEqualTo(clusteringRule.getLeaderForPartition(1).getNodeId());\n }\n \n private void assertThatFilesOfDeletedSegmentsDoesNotExist(final Broker leader)\n"]
| 1 |
["47df74d40becf915a9d89cdb887abd259b77def0"]
|
["test"]
|
change min checked results for score calculation
|
["diff --git a/server/src/services/courseService.ts b/server/src/services/courseService.ts\nindex 89633f4..10bfc55 100644\n--- a/server/src/services/courseService.ts\n+++ b/server/src/services/courseService.ts\n@@ -580,8 +580,7 @@ export async function getTaskSolutionCheckers(courseTaskId: number, minCheckedCo\n .createQueryBuilder('tsr')\n .select('tsr.\"studentId\", ROUND(AVG(tsr.score)) as \"score\"')\n .where(qb => {\n- // query students with 3 checked tasks\n-\n+ // query students who checked enough tasks\n const query = qb\n .subQuery()\n .select('r.\"checkerId\"')\n@@ -600,7 +599,7 @@ export async function getTaskSolutionCheckers(courseTaskId: number, minCheckedCo\n })\n .andWhere('tsr.\"courseTaskId\" = :courseTaskId', { courseTaskId })\n .groupBy('tsr.\"studentId\"')\n- .having(`COUNT(tsr.id) >= :count`, { count: minCheckedCount })\n+ .having(`COUNT(tsr.id) >= :count`, { count: minCheckedCount - 1 })\n .getRawMany();\n \n return records.map(record => ({ studentId: record.studentId, score: Number(record.score) }));\n"]
| 1 |
["fd849bd08363df60dbc8b9b6d55bac4f5ace88f4"]
|
["docs"]
|
fix readme
|
["diff --git a/crates/dagger-sdk/README.md b/crates/dagger-sdk/README.md\nindex ed96be1..974fb7f 100644\n--- a/crates/dagger-sdk/README.md\n+++ b/crates/dagger-sdk/README.md\n@@ -29,9 +29,9 @@ fn main() -> eyre::Result<()> {\n let client = dagger_sdk::connect()?;\n \n let version = client\n- .container(None)\n- .from(\"golang:1.19\".into())\n- .with_exec(vec![\"go\".into(), \"version\".into()], None)\n+ .container()\n+ .from(\"golang:1.19\")\n+ .with_exec(vec![\"go\", \"version\"])\n .stdout()?;\n \n println!(\"Hello from Dagger and {}\", version.trim());\n"]
| 1 |
["04e70ce964b343e28b3dbd0c46d10ccda958ab8c"]
|
["docs"]
|
add tests for ProfilePage methods
|
["diff --git a/client/src/components/Profile/PreScreeningIviewCard.tsx b/client/src/components/Profile/PreScreeningIviewCard.tsx\nindex f84392a..2031203 100644\n--- a/client/src/components/Profile/PreScreeningIviewCard.tsx\n+++ b/client/src/components/Profile/PreScreeningIviewCard.tsx\n@@ -27,7 +27,7 @@ type State = {\n isPreScreeningIviewModalVisible: boolean;\n };\n \n-class CoreJSIviewsCard extends React.PureComponent<Props, State> {\n+class PreScreeningIviewsCard extends React.PureComponent<Props, State> {\n state = {\n courseIndex: 0,\n isPreScreeningIviewModalVisible: false,\n@@ -98,4 +98,4 @@ class CoreJSIviewsCard extends React.PureComponent<Props, State> {\n }\n }\n \n-export default CoreJSIviewsCard;\n+export default PreScreeningIviewsCard;\ndiff --git a/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap b/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\nindex 7b73c3f..54b378c 100644\n--- a/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\n+++ b/client/src/components/Profile/__test__/__snapshots__/PreScreeningIviewCard.test.tsx.snap\n@@ -1,7 +1,7 @@\n // Jest Snapshot v1, https://goo.gl/fbAQLP\n \n exports[`PreScreeningIviewCard Should render correctly 1`] = `\n-<CoreJSIviewsCard\n+<PreScreeningIviewsCard\n data={\n Array [\n Object {\n@@ -3015,5 +3015,5 @@ exports[`PreScreeningIviewCard Should render correctly 1`] = `\n </div>\n </Card>\n </CommonCard>\n-</CoreJSIviewsCard>\n+</PreScreeningIviewsCard>\n `;\ndiff --git a/client/src/pages/profile/__tests__/ProfilePage.tests.tsx b/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\nindex 079d966..95f3e49 100644\n--- a/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\n+++ b/client/src/pages/profile/__tests__/ProfilePage.tests.tsx\n@@ -4,7 +4,6 @@ import { shallowToJson } from 'enzyme-to-json';\n import { NextRouter } from 'next/router';\n import { Session } from 'components/withSession';\n import { ProfilePage } from '../index';\n-// import { GeneralInfo } from '../../../../../common/models/profile';\n \n jest.mock('next/config', () => () => ({}));\n jest.mock('services/user', () => ({\n@@ -12,80 +11,378 @@ jest.mock('services/user', () => ({\n getProfileInfo() {\n return jest.fn();\n }\n+ saveProfileInfo() {\n+ return jest.fn();\n+ }\n },\n }),\n );\n \n-describe('ProfilePage', () => {\n- const profile = {\n- generalInfo: {\n- name: 'Dzmitry Petrov',\n- githubId: 'petrov',\n- aboutMyself: 'Test',\n+const profile = {\n+ permissionsSettings: {\n+ isProfileVisible: { all: true },\n+ isAboutVisible: { mentor: true, student: false, all: false },\n+ isEducationVisible: { mentor: true, student: false, all: false },\n+ isEnglishVisible: { student: false, all: false },\n+ isEmailVisible: { student: false, all: false },\n+ isTelegramVisible: { student: false, all: false },\n+ isSkypeVisible: { student: false, all: false },\n+ isPhoneVisible: { student: false, all: false },\n+ isContactsNotesVisible: { student: true, all: false },\n+ isLinkedInVisible: { mentor: true, student: false, all: false },\n+ isPublicFeedbackVisible: { mentor: true, student: true, all: false },\n+ isMentorStatsVisible: { mentor: true, student: true, all: false },\n+ isStudentStatsVisible: { student: false, all: false },\n+ },\n+ generalInfo: {\n+ aboutMyself: 'Test',\n+ educationHistory: [{\n+ graduationYear: '2019',\n+ faculty: 'TT',\n+ university: 'Test',\n+ }],\n+ englishLevel: 'a2+',\n+ locationId: 456,\n+ locationName: 'Brest',\n+ 
},\n+ contacts: {},\n+ mentorStats: [\n+ {},\n+ ],\n+ studentStats: [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n locationName: 'Minsk',\n- locationId: '1',\n- educationHistory: null,\n- englishLevel: 'a2+',\n- },\n- permissionsSettings: {\n- isProfileVisible: { all: true },\n- isAboutVisible: { mentor: true, student: false, all: false },\n- isEducationVisible: { mentor: true, student: false, all: false },\n- isEnglishVisible: { student: false, all: false },\n- isEmailVisible: { student: false, all: false },\n- isTelegramVisible: { student: false, all: false },\n- isSkypeVisible: { student: false, all: false },\n- isPhoneVisible: { student: false, all: false },\n- isContactsNotesVisible: { student: true, all: false },\n- isLinkedInVisible: { mentor: true, student: false, all: false },\n- isPublicFeedbackVisible: { mentor: true, student: true, all: false },\n- isMentorStatsVisible: { mentor: true, student: true, all: false },\n- isStudentStatsVisible: { student: false, all: false },\n- },\n- contacts: {\n- phone: '+375292123456',\n- email: 'petro@gmail.com',\n- skype: 'petro:live',\n- telegram: 'petro',\n- notes: 'discord: @petro, instagram: @petro12',\n- },\n- isPermissionsSettingsChanged: true,\n- isProfileSettingsChanged: true,\n- };\n- const session = {\n- id: 2020,\n- githubId: 'mikhama',\n- isAdmin: true,\n- isHirer: false,\n- isActivist: false,\n- roles: {\n- 1: 'mentor',\n- 2: 'student',\n- 11: 'mentor',\n- },\n- coursesRoles: {\n- 13: [\n- 'manager',\n+ tasks: [\n+ {\n+ interviewFormAnswers: {},\n+ },\n ],\n },\n- } as Session;\n- const router = {\n- query: {\n- githubId: 'petrov',\n- },\n- asPath: '/#edit/',\n- } as unknown as NextRouter;\n+ ],\n+ publicFeedback: [\n+ {},\n+ ],\n+ stageInterviewFeedback: [\n+ {},\n+ ],\n+};\n+const session = {\n+ id: 2020,\n+ githubId: 'mikhama',\n+ isAdmin: true,\n+ isHirer: false,\n+ isActivist: false,\n+ roles: {\n+ 1: 'mentor',\n+ 2: 'student',\n+ 11: 'mentor',\n+ },\n+ coursesRoles: {\n+ 13: [\n+ 'manager',\n+ ],\n+ },\n+} as Session;\n+const router = {\n+ query: {\n+ githubId: 'petrov',\n+ },\n+ asPath: '/#edit/',\n+} as unknown as NextRouter;\n+const state = {\n+ profile,\n+ isInitialPermissionsSettingsChanged: false,\n+ isInitialProfileSettingsChanged: false,\n+};\n \n+describe('ProfilePage', () => {\n describe('Should render correctly', () => {\n- it('if full info about profile is in the state', () => {\n+ it('if full profile info is in the state', () => {\n const wrapper = shallow(\n <ProfilePage\n session={session}\n router={router}\n />,\n );\n- wrapper.setState({ profile });\n+ wrapper.setState(state);\n expect(shallowToJson(wrapper)).toMatchSnapshot();\n });\n });\n+\n+ const wrapper = shallow(\n+ <ProfilePage\n+ session={session}\n+ router={router}\n+ />,\n+ );\n+ const instance = wrapper.instance();\n+ describe('onPermissionsSettingsChange', () => {\n+ describe('Should set state correctly', () => {\n+ it('if permissions for student role were changed', async () => {\n+ const event = {\n+ target: {\n+ checked: true,\n+ },\n+ }\n+ const changedPermissionsSettings = {\n+ permissionName: 'isEmailVisible',\n+ role: 'student',\n+ };\n+ wrapper.setState(state);\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\n+ expect(wrapper.state().profile.permissionsSettings.isEmailVisible).toEqual({\n+ student: true, all: false,\n+ });\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\n+ });\n+ it('if permissions for mentor role were changed', async () => {\n+ const event = {\n+ 
target: {\n+ checked: false,\n+ },\n+ }\n+ const changedPermissionsSettings = {\n+ permissionName: 'isLinkedInVisible',\n+ role: 'mentor',\n+ };\n+ wrapper.setState(state);\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\n+ expect(wrapper.state().profile.permissionsSettings.isLinkedInVisible).toEqual({\n+ mentor: false, student: false, all: false,\n+ });\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\n+ });\n+ it('if permissions for all roles were changed', async () => {\n+ const event = {\n+ target: {\n+ checked: true,\n+ },\n+ }\n+ const changedPermissionsSettings = {\n+ permissionName: 'isEducationVisible',\n+ role: 'all',\n+ };\n+ wrapper.setState(state);\n+ await instance.onPermissionsSettingsChange(event, changedPermissionsSettings);\n+ expect(wrapper.state().profile.permissionsSettings.isEducationVisible).toEqual({\n+ mentor: true, student: true, all: true,\n+ });\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(true);\n+ });\n+ });\n+ });\n+ describe('onProfileSettingsChange', () => {\n+ describe('Should set state correctly', () => {\n+ it('if \"profile.generalInfo.location\" was changed', async () => {\n+ const event = {\n+ id: 123,\n+ name: 'Minsk',\n+ }\n+ const path = 'generalInfo.location';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.locationId).toBe(123);\n+ expect(wrapper.state().profile.generalInfo.locationName).toBe('Minsk');\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\n+ });\n+ it('if \"profile.generalInfo.englishLevel\" was changed', async () => {\n+ const event = 'b2+';\n+ const path = 'generalInfo.englishLevel';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.englishLevel).toBe('b2+');\n+ });\n+ it('if field added to \"profile.generalInfo.educationHistory\"', async () => {\n+ const event = {\n+ type: 'add',\n+ };\n+ const path = 'generalInfo.educationHistory';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.educationHistory).toEqual([\n+ {\n+ graduationYear: '2019',\n+ faculty: 'TT',\n+ university: 'Test',\n+ },\n+ {\n+ graduationYear: null,\n+ faculty: null,\n+ university: null,\n+ },\n+ ]);\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\n+ });\n+ it('if field deleted from \"profile.generalInfo.educationHistory\"', async () => {\n+ const event = {\n+ type: 'delete',\n+ index: 0,\n+ };\n+ const path = 'generalInfo.educationHistory';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.educationHistory).toEqual([]);\n+ });\n+ it('if some other field was changed', async () => {\n+ const event = {\n+ target: {\n+ value: 'Hello everyone, my name is Mike.',\n+ }\n+ };\n+ const path = 'generalInfo.aboutMyself';\n+ wrapper.setState(state);\n+ await instance.onProfileSettingsChange(event, path);\n+ expect(wrapper.state().profile.generalInfo.aboutMyself).toEqual('Hello everyone, my name is Mike.');\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(true);\n+ });\n+ });\n+ });\n+ describe('changeProfilePageMode', () => {\n+ describe('Should set state correctly', () => {\n+ it('if mode = \"edit\" was passed', async () => {\n+ const mode = 'edit';\n+ wrapper.setState({ ...state, isEditingModeEnabled: false });\n+ 
expect(wrapper.state().isEditingModeEnabled).toBe(false);\n+ await instance.changeProfilePageMode(mode);\n+ expect(wrapper.state().isEditingModeEnabled).toBe(true);\n+ });\n+ it('if mode = \"view\" was passed', async () => {\n+ const mode = 'view';\n+ wrapper.setState({ ...state, isEditingModeEnabled: true });\n+ expect(wrapper.state().isEditingModeEnabled).toBe(true);\n+ await instance.changeProfilePageMode(mode);\n+ expect(wrapper.state().isEditingModeEnabled).toBe(false);\n+ });\n+ });\n+ });\n+ describe('saveProfile', () => {\n+ it('Should set state correctly', async () => {\n+ const profile = {\n+ generalInfo: {\n+ aboutMyself: 'Hello',\n+ educationHistory: [{\n+ graduationYear: '2019',\n+ faculty: 'TT',\n+ university: 'Test',\n+ }],\n+ englishLevel: 'c1',\n+ locationId: 778,\n+ locationName: 'Hrodna',\n+ },\n+ contacts: {\n+ telegram: 'test',\n+ },\n+ permissionsSettings: {\n+ isProfileVisible: { all: true },\n+ isAboutVisible: { mentor: true, student: false, all: false },\n+ isEducationVisible: { mentor: true, student: false, all: false },\n+ isEnglishVisible: { student: true, all: true },\n+ isEmailVisible: { student: true, all: true },\n+ isTelegramVisible: { student: true, all: true },\n+ isSkypeVisible: { student: true, all: false },\n+ isPhoneVisible: { student: true, all: false },\n+ isContactsNotesVisible: { student: true, all: false },\n+ isLinkedInVisible: { mentor: true, student: false, all: false },\n+ isPublicFeedbackVisible: { mentor: true, student: true, all: false },\n+ isMentorStatsVisible: { mentor: true, student: true, all: false },\n+ isStudentStatsVisible: { student: false, all: false },\n+ },\n+ };\n+ wrapper.setState({\n+ ...state,\n+ profile,\n+ isInitialPermissionsSettingsChanged: true,\n+ isInitialProfileSettingsChanged: true,\n+ });\n+ await instance.saveProfile();\n+ expect(wrapper.state().isSaving).toBe(false);\n+ expect(wrapper.state().isInitialPermissionsSettingsChanged).toBe(false);\n+ expect(wrapper.state().isInitialProfileSettingsChanged).toBe(false);\n+ expect(wrapper.state().initialPermissionsSettings).toEqual(profile.permissionsSettings);\n+ expect(wrapper.state().initialProfileSettings).toEqual(profile);\n+ });\n+ });\n+ describe('hadStudentCoreJSInterview', () => {\n+ describe('Should return', () => {\n+ it('\"true\" if student has an \"interviewFormAnswers\" in one of the task', () => {\n+ const studentStats = [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ locationName: 'Minsk',\n+ tasks: [\n+ {},\n+ {\n+ interviewFormAnswers: {},\n+ },\n+ {},\n+ {},\n+ ],\n+ },\n+ ];\n+ const result = instance.hadStudentCoreJSInterview(studentStats);\n+ expect(result).toBe(true);\n+ });\n+ it('\"false\" if student has not an \"interviewFormAnswers\" in one of the task', () => {\n+ const studentStats = [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ locationName: 'Minsk',\n+ tasks: [\n+ {},\n+ {},\n+ {},\n+ ],\n+ },\n+ ];\n+ const result = instance.hadStudentCoreJSInterview(studentStats);\n+ expect(result).toBe(false);\n+ });\n+ });\n+ });\n+ describe('getStudentCoreJSInterviews', () => {\n+ it('Should return info about CoreJS interviews', () => {\n+ const studentStats = [\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ locationName: 'Minsk',\n+ tasks: [\n+ {},\n+ {},\n+ {\n+ interviewer: {\n+ name: 'Dima Petrov',\n+ githubId: 'dip',\n+ },\n+ comment: 'Test',\n+ score: 9,\n+ interviewFormAnswers: {},\n+ },\n+ {},\n+ ],\n+ },\n+ ];\n+ const result = instance.getStudentCoreJSInterviews(studentStats);\n+ 
expect(result).toEqual([\n+ {\n+ courseFullName: 'test',\n+ courseName: 'test',\n+ interview: {\n+ answers: {},\n+ interviewer: {\n+ name: 'Dima Petrov',\n+ githubId: 'dip',\n+ },\n+ comment: 'Test',\n+ score: 9,\n+ },\n+ locationName: 'Minsk',\n+ },\n+ ]);\n+ });\n+ });\n });\ndiff --git a/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap b/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\nindex fbd133c..729b2de 100644\n--- a/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\n+++ b/client/src/pages/profile/__tests__/__snapshots__/ProfilePage.tests.tsx.snap\n@@ -1,6 +1,6 @@\n // Jest Snapshot v1, https://goo.gl/fbAQLP\n \n-exports[`ProfilePage Should render correctly if 1`] = `\n+exports[`ProfilePage Should render correctly if full profile info is in the state 1`] = `\n <Fragment>\n <LoadingScreen\n show={true}\n@@ -50,12 +50,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -135,12 +139,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -220,12 +228,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -305,12 +317,16 @@ exports[`ProfilePage Should render correctly if 1`] = `\n data={\n Object {\n \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n+ \"educationHistory\": Array [\n+ Object {\n+ \"faculty\": \"TT\",\n+ \"graduationYear\": \"2019\",\n+ \"university\": \"Test\",\n+ },\n+ ],\n \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n+ \"locationId\": 456,\n+ \"locationName\": \"Brest\",\n }\n }\n isEditingModeEnabled={false}\n@@ -387,15 +403,7 @@ exports[`ProfilePage Should render correctly if 1`] = `\n }\n >\n <ContactsCard\n- data={\n- Object {\n- \"email\": \"petro@gmail.com\",\n- \"notes\": \"discord: @petro, instagram: @petro12\",\n- \"phone\": \"+375292123456\",\n- \"skype\": \"petro:live\",\n- \"telegram\": \"petro\",\n- }\n- }\n+ data={Object {}}\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n onProfileSettingsChange={[Function]}\n@@ -461,84 +469,22 
@@ exports[`ProfilePage Should render correctly if 1`] = `\n }\n />\n </div>\n- </Masonry>\n- <JSXStyle\n- id=\"3803498300\"\n- >\n- div.jsx-3803498300{display:-webkit-box;display:-webkit-flex;display:-ms-flexbox;display:flex;margin-left:-16px;width:auto;}\n- </JSXStyle>\n- <JSXStyle\n- id=\"110195169\"\n- >\n- div.jsx-110195169{padding-left:16px;background-clip:padding-box;}\n- </JSXStyle>\n- </div>\n- </Spin>\n- </LoadingScreen>\n-</Fragment>\n-`;\n-\n-exports[`ProfilePage Should render correctly if full info about profile is in the state 1`] = `\n-<Fragment>\n- <LoadingScreen\n- show={true}\n- >\n- <Header\n- isProfileEditingModeEnabled={false}\n- isProfilePage={false}\n- isSaveButtonVisible={false}\n- onChangeProfilePageMode={[Function]}\n- onSaveClick={[Function]}\n- username=\"mikhama\"\n- />\n- <Spin\n- delay={200}\n- size=\"default\"\n- spinning={false}\n- wrapperClassName=\"\"\n- >\n- <div\n- style={\n- Object {\n- \"padding\": 10,\n- }\n- }\n- >\n- <Masonry\n- breakpointCols={\n- Object {\n- \"1100\": 3,\n- \"500\": 1,\n- \"700\": 2,\n- \"default\": 4,\n- }\n- }\n- className=\"jsx-3803498300\"\n- columnClassName=\"jsx-110195169\"\n- >\n <div\n- key=\"card-0\"\n+ key=\"card-5\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <MainCard\n+ <PublicFeedbackCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n+ Array [\n+ Object {},\n+ ]\n }\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n permissionsSettings={\n Object {\n \"isAboutVisible\": Object {\n@@ -602,28 +548,30 @@ exports[`ProfilePage Should render correctly if full info about profile is in th\n />\n </div>\n <div\n- key=\"card-1\"\n+ key=\"card-6\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <AboutCard\n+ <StudentStatsCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n+ Array [\n+ Object {\n+ \"courseFullName\": \"test\",\n+ \"courseName\": \"test\",\n+ \"locationName\": \"Minsk\",\n+ \"tasks\": Array [\n+ Object {\n+ \"interviewFormAnswers\": Object {},\n+ },\n+ ],\n+ },\n+ ]\n }\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n permissionsSettings={\n Object {\n \"isAboutVisible\": Object {\n@@ -687,28 +635,21 @@ exports[`ProfilePage Should render correctly if full info about profile is in th\n />\n </div>\n <div\n- key=\"card-2\"\n+ key=\"card-7\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <EnglishCard\n+ <MentorStatsCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n+ Array [\n+ Object {},\n+ ]\n }\n isEditingModeEnabled={false}\n onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n permissionsSettings={\n Object {\n \"isAboutVisible\": Object {\n@@ -772,170 +713,44 @@ exports[`ProfilePage Should render correctly if full info about profile is in th\n />\n </div>\n <div\n- key=\"card-3\"\n+ key=\"card-8\"\n style={\n Object {\n \"marginBottom\": 16,\n 
}\n }\n >\n- <EducationCard\n+ <CoreJSIviewsCard\n data={\n- Object {\n- \"aboutMyself\": \"Test\",\n- \"educationHistory\": null,\n- \"englishLevel\": \"a2+\",\n- \"githubId\": \"petrov\",\n- \"locationId\": \"1\",\n- \"locationName\": \"Minsk\",\n- \"name\": \"Dzmitry Petrov\",\n- }\n- }\n- isEditingModeEnabled={false}\n- onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n- permissionsSettings={\n- Object {\n- \"isAboutVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isContactsNotesVisible\": Object {\n- \"all\": false,\n- \"student\": true,\n- },\n- \"isEducationVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isEmailVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isEnglishVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isLinkedInVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isMentorStatsVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isPhoneVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isProfileVisible\": Object {\n- \"all\": true,\n- },\n- \"isPublicFeedbackVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isSkypeVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isStudentStatsVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isTelegramVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- }\n+ Array [\n+ Object {\n+ \"courseFullName\": \"test\",\n+ \"courseName\": \"test\",\n+ \"interview\": Object {\n+ \"answers\": Object {},\n+ \"comment\": undefined,\n+ \"interviewer\": undefined,\n+ \"score\": undefined,\n+ },\n+ \"locationName\": \"Minsk\",\n+ },\n+ ]\n }\n />\n </div>\n <div\n- key=\"card-4\"\n+ key=\"card-9\"\n style={\n Object {\n \"marginBottom\": 16,\n }\n }\n >\n- <ContactsCard\n+ <PreScreeningIviewsCard\n data={\n- Object {\n- \"email\": \"petro@gmail.com\",\n- \"notes\": \"discord: @petro, instagram: @petro12\",\n- \"phone\": \"+375292123456\",\n- \"skype\": \"petro:live\",\n- \"telegram\": \"petro\",\n- }\n- }\n- isEditingModeEnabled={false}\n- onPermissionsSettingsChange={[Function]}\n- onProfileSettingsChange={[Function]}\n- permissionsSettings={\n- Object {\n- \"isAboutVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isContactsNotesVisible\": Object {\n- \"all\": false,\n- \"student\": true,\n- },\n- \"isEducationVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isEmailVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isEnglishVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isLinkedInVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": false,\n- },\n- \"isMentorStatsVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isPhoneVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isProfileVisible\": Object {\n- \"all\": true,\n- },\n- \"isPublicFeedbackVisible\": Object {\n- \"all\": false,\n- \"mentor\": true,\n- \"student\": true,\n- },\n- \"isSkypeVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isStudentStatsVisible\": Object {\n- \"all\": false,\n- \"student\": false,\n- },\n- \"isTelegramVisible\": Object {\n- \"all\": false,\n- 
\"student\": false,\n- },\n- }\n+ Array [\n+ Object {},\n+ ]\n }\n />\n </div>\n"]
| 1 |
["11ffd5174bd61a2939ae58d2b2d43284302ae490"]
|
["test"]
|
remove broken link
Fixes #1785
|
["diff --git a/docs/content/Caching/Caching.md b/docs/content/Caching/Caching.md\nindex d873a52..9706dda 100644\n--- a/docs/content/Caching/Caching.md\n+++ b/docs/content/Caching/Caching.md\n@@ -135,8 +135,9 @@ If nothing is found in the cache, the query is executed in the database and the \n is returned as well as updating the cache.\n \n If an existing value is present in the cache and the `refreshKey` value for\n-the query hasn't changed, the cached value will be returned. Otherwise, a\n-[query renewal](#in-memory-cache-force-query-renewal) will be performed.\n+the query hasn't changed, the cached value will be returned. Otherwise, a SQL query will be executed either against the pre-aggregations storage or the source database to populate the cache with the results and return them.\n+\n+\n \n ### Refresh Keys\n \n"]
| 1 |
["c351088bce98594c740a39546ce3655c91554a5d"]
|
["docs"]
|
abort parallel stages if one failed
|
["diff --git a/Jenkinsfile b/Jenkinsfile\nindex 168f446..a4da961 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -28,6 +28,7 @@ pipeline {\n }\n \n stage('Verify') {\n+ failFast true\n parallel {\n stage('Tests') {\n steps {\n"]
| 1 |
["28e623b294816c4e070971782a75c8697a11966f"]
|
["cicd"]
|
better tested publishing flow
|
["diff --git a/Makefile.toml b/Makefile.toml\nindex e7d2b20..490d6e2 100644\n--- a/Makefile.toml\n+++ b/Makefile.toml\n@@ -82,7 +82,7 @@ end\n '''\n \n [tasks.build-plugins-release]\n-env = { \"CARGO_MAKE_WORKSPACE_SKIP_MEMBERS\" = [\".\"] }\n+env = { \"CARGO_MAKE_WORKSPACE_INCLUDE_MEMBERS\" = [\"default-plugins/status-bar\", \"default-plugins/strider\", \"default-plugins/tab-bar\"] }\n run_task = { name = \"build-release\", fork = true }\n \n [tasks.wasm-opt-plugins]\n@@ -129,15 +129,16 @@ args = [\"install\", \"cross\"]\n [tasks.publish]\n clear = true\n workspace = false\n-dependencies = [\"build-plugins-release\", \"wasm-opt-plugins\", \"release-commit\", \"build-release\", \"publish-zellij-tile\", \"publish-zellij-tile-utils\", \"publish-zellij-utils\", \"publish-zellij-client\", \"publish-zellij-server\"]\n+dependencies = [\"build-plugins-release\", \"wasm-opt-plugins\", \"release-commit\"]\n run_task = \"publish-zellij\"\n \n [tasks.release-commit]\n dependencies = [\"commit-all\", \"tag-release\"]\n command = \"git\"\n-args = [\"push\", \"--atomic\", \"upstream\", \"main\", \"v${CARGO_MAKE_CRATE_VERSION}\"]\n+args = [\"push\", \"--atomic\", \"origin\", \"main\", \"v${CARGO_MAKE_CRATE_VERSION}\"]\n \n [tasks.commit-all]\n+ignore_errors = true\n command = \"git\"\n args = [\"commit\", \"-aem\", \"chore(release): v${CARGO_MAKE_CRATE_VERSION}\"]\n \n@@ -148,31 +149,32 @@ args = [\"tag\", \"v${CARGO_MAKE_CRATE_VERSION}\"]\n [tasks.publish-zellij-tile]\n ignore_errors = true\n cwd = \"zellij-tile\"\n-command = \"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij-client]\n+ignore_errors = true\n dependencies = [\"publish-zellij-utils\"]\n cwd = \"zellij-client\"\n-command = \"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij-server]\n+ignore_errors = true\n dependencies = [\"publish-zellij-utils\"]\n cwd = \"zellij-server\"\n-command = \"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij-utils]\n+ignore_errors = true\n dependencies = [\"publish-zellij-tile\"]\n cwd = \"zellij-utils\"\n-command = \"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij-tile-utils]\n ignore_errors = true\n cwd = \"zellij-tile-utils\"\n-command = \"cargo publish && sleep 15\"\n+script = \"cargo publish && sleep 15\"\n \n [tasks.publish-zellij]\n dependencies = [\"publish-zellij-client\", \"publish-zellij-server\", \"publish-zellij-utils\"]\n command = \"cargo\"\n args = [\"publish\"]\n-\n-\n"]
| 1 |
["65574eea5da54bf4722ecb551b42f8ff6088f33b"]
|
["build"]
|
missing transformation for T
|
["diff --git a/src/Tuple/Merge.ts b/src/Tuple/Merge.ts\nindex dfa7ce5..5ba44b7 100644\n--- a/src/Tuple/Merge.ts\n+++ b/src/Tuple/Merge.ts\n@@ -30,7 +30,7 @@ type _MergeFlat<O extends object, O1P extends object> = {\n }\n \n type MergeDeep<T extends any[], T1 extends any[]> =\n- TupleOf<Compute<_MergeDeep<T, Omit<ObjectOf<T1>, keyof T>, ObjectOf<T1>>>>\n+ TupleOf<Compute<_MergeDeep<ObjectOf<T>, Omit<ObjectOf<T1>, keyof T>, ObjectOf<T1>>>>\n // same principle as above, but with a little tweak\n // we keep the original `O1` to know if we can merge\n // => if `O` and `O1` have `object` fields of same name\n"]
| 1 |
["c4d9e5023fa0f88ba283b37da27677ceda1cbfbb"]
|
["fix"]
|
rebuild when environment variables change (#11471)
|
["diff --git a/cli/build.rs b/cli/build.rs\nindex 548fbb5..d7bed21 100644\n--- a/cli/build.rs\n+++ b/cli/build.rs\n@@ -269,8 +269,17 @@ fn main() {\n // To debug snapshot issues uncomment:\n // op_fetch_asset::trace_serializer();\n \n- println!(\"cargo:rustc-env=TS_VERSION={}\", ts_version());\n+ if let Ok(c) = env::var(\"DENO_CANARY\") {\n+ println!(\"cargo:rustc-env=DENO_CANARY={}\", c);\n+ }\n+ println!(\"cargo:rerun-if-env-changed=DENO_CANARY\");\n+\n println!(\"cargo:rustc-env=GIT_COMMIT_HASH={}\", git_commit_hash());\n+ println!(\"cargo:rerun-if-env-changed=GIT_COMMIT_HASH\");\n+\n+ println!(\"cargo:rustc-env=TS_VERSION={}\", ts_version());\n+ println!(\"cargo:rerun-if-env-changed=TS_VERSION\");\n+\n println!(\n \"cargo:rustc-env=DENO_CONSOLE_LIB_PATH={}\",\n deno_console::get_declaration().display()\n@@ -322,9 +331,6 @@ fn main() {\n \n println!(\"cargo:rustc-env=TARGET={}\", env::var(\"TARGET\").unwrap());\n println!(\"cargo:rustc-env=PROFILE={}\", env::var(\"PROFILE\").unwrap());\n- if let Ok(c) = env::var(\"DENO_CANARY\") {\n- println!(\"cargo:rustc-env=DENO_CANARY={}\", c);\n- }\n \n let c = PathBuf::from(env::var_os(\"CARGO_MANIFEST_DIR\").unwrap());\n let o = PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n"]
| 1 |
["63546c15bfb1284ac6d956eee274e6d7cf263a8f"]
|
["build"]
|
simplify aggregate_columns
|
["diff --git a/read_buffer/src/row_group.rs b/read_buffer/src/row_group.rs\nindex 19b0501..f2fbbe3 100644\n--- a/read_buffer/src/row_group.rs\n+++ b/read_buffer/src/row_group.rs\n@@ -924,38 +924,30 @@ impl RowGroup {\n },\n };\n \n- // References to the columns to be used as input for producing the\n- // output aggregates. Also returns the required aggregate type.\n- let input_aggregate_columns = dst\n- .schema\n- .aggregate_columns\n- .iter()\n- .map(|(col_type, agg_type, _)| (self.column_by_name(col_type.as_str()), *agg_type))\n- .collect::<Vec<_>>();\n-\n- let mut output_aggregate_columns = dst\n+ dst.aggregate_cols = dst\n .schema\n .aggregate_columns\n .iter()\n- .map(|(_, agg_type, data_type)| AggregateVec::from((agg_type, data_type)))\n- .collect::<Vec<_>>();\n+ .map(|(col_type, agg_type, data_type)| {\n+ let col = self.column_by_name(col_type.as_str()); // input aggregate column\n+ let mut agg_vec = AggregateVec::from((agg_type, data_type));\n \n- for (i, (col, agg_type)) in input_aggregate_columns.iter().enumerate() {\n- match agg_type {\n- AggregateType::Count => {\n- let value = Value::Scalar(Scalar::U64(col.count(&row_ids) as u64));\n- output_aggregate_columns[i].push(value);\n- }\n- AggregateType::First => unimplemented!(\"First not yet implemented\"),\n- AggregateType::Last => unimplemented!(\"Last not yet implemented\"),\n- AggregateType::Min => output_aggregate_columns[i].push(col.min(&row_ids)),\n- AggregateType::Max => output_aggregate_columns[i].push(col.max(&row_ids)),\n- AggregateType::Sum => {\n- output_aggregate_columns[i].push(Value::Scalar(col.sum(&row_ids)))\n+ // produce single aggregate for the input column subject to a\n+ // predicate filter.\n+ match agg_type {\n+ AggregateType::Count => {\n+ let value = Value::Scalar(Scalar::U64(col.count(&row_ids) as u64));\n+ agg_vec.push(value);\n+ }\n+ AggregateType::First => unimplemented!(\"First not yet implemented\"),\n+ AggregateType::Last => unimplemented!(\"Last not yet implemented\"),\n+ AggregateType::Min => agg_vec.push(col.min(&row_ids)),\n+ AggregateType::Max => agg_vec.push(col.max(&row_ids)),\n+ AggregateType::Sum => agg_vec.push(Value::Scalar(col.sum(&row_ids))),\n }\n- }\n- }\n- dst.aggregate_cols = output_aggregate_columns;\n+ agg_vec\n+ })\n+ .collect::<Vec<_>>();\n }\n \n /// Given the predicate (which may be empty), determine a set of rows\n"]
| 1 |
["300ac2e411ef0ebfe1441d42cc88787116a9afa4"]
|
["refactor"]
|
explain `ChunkOrder` query test scenario
|
["diff --git a/query_tests/src/scenarios.rs b/query_tests/src/scenarios.rs\nindex f0e352b..86df0e9 100644\n--- a/query_tests/src/scenarios.rs\n+++ b/query_tests/src/scenarios.rs\n@@ -1170,6 +1170,21 @@ impl DbSetup for ChunkOrder {\n .clear_lifecycle_action()\n .unwrap();\n \n+ // Now we have the the following chunks (same partition and table):\n+ //\n+ // | ID | order | tag: region | field: user | time |\n+ // | -- | ----- | ----------- | ----------- | ---- |\n+ // | 1 | 1 | \"west\" | 2 | 100 |\n+ // | 2 | 0 | \"west\" | 1 | 100 |\n+ //\n+ // The result after deduplication should be:\n+ //\n+ // | tag: region | field: user | time |\n+ // | ----------- | ----------- | ---- |\n+ // | \"west\" | 2 | 100 |\n+ //\n+ // So the query engine must use `order` as a primary key to sort chunks, NOT `id`.\n+\n let scenario = DbScenario {\n scenario_name: \"chunks where chunk ID alone cannot be used for ordering\".into(),\n db,\n"]
| 1 |
["9a60af7fa3b480e2e04bacd646112cad9aaab6d7"]
|
["docs"]
|
Add the select function for logicflow
|
["diff --git a/packages/core/src/LogicFlow.tsx b/packages/core/src/LogicFlow.tsx\nindex 0d913b7..dcc59b3 100644\n--- a/packages/core/src/LogicFlow.tsx\n+++ b/packages/core/src/LogicFlow.tsx\n@@ -276,6 +276,12 @@ export default class LogicFlow {\n this.translate(-TRANSLATE_X, -TRANSLATE_Y);\n }\n /**\n+ * \u5c06\u56fe\u5f62\u9009\u4e2d\n+ */\n+ select(id: string) {\n+ this.graphModel.selectElementById(id);\n+ }\n+ /**\n * \u5c06\u56fe\u5f62\u5b9a\u4f4d\u5230\u753b\u5e03\u4e2d\u5fc3\n * @param focusOnArgs \u652f\u6301\u7528\u6237\u4f20\u5165\u56fe\u5f62\u5f53\u524d\u7684\u5750\u6807\u6216id\uff0c\u53ef\u4ee5\u901a\u8fc7type\u6765\u533a\u5206\u662f\u8282\u70b9\u8fd8\u662f\u8fde\u7ebf\u7684id\uff0c\u4e5f\u53ef\u4ee5\u4e0d\u4f20\uff08\u515c\u5e95\uff09\n */\ndiff --git a/packages/core/src/model/GraphModel.ts b/packages/core/src/model/GraphModel.ts\nindex 94d0899..10280a9 100644\n--- a/packages/core/src/model/GraphModel.ts\n+++ b/packages/core/src/model/GraphModel.ts\n@@ -481,6 +481,13 @@ class GraphModel {\n this.selectElement?.setSelected(true);\n }\n \n+ @action\n+ selectElementById(id: string) {\n+ this.selectElement?.setSelected(false);\n+ this.selectElement = this.getElement(id) as BaseNodeModel | BaseEdgeModel;\n+ this.selectElement?.setSelected(true);\n+ }\n+\n /* \u4fee\u6539\u8fde\u7ebf\u7c7b\u578b */\n @action\n changeEdgeType(type: string): void {\n"]
| 1 |
["6ae067153cd2608018fd3da76bd6d00a08da4b3a"]
|
["feat"]
|
support custom style by class for mini-map
affects: @logicflow/extension
|
["diff --git a/packages/extension/src/components/mini-map/index.ts b/packages/extension/src/components/mini-map/index.ts\nindex 35cd047..ad5194d 100644\n--- a/packages/extension/src/components/mini-map/index.ts\n+++ b/packages/extension/src/components/mini-map/index.ts\n@@ -2,7 +2,7 @@ import { Extension } from '@logicflow/core';\n \n interface MiniMapPlugin extends Extension {\n init: (option) => void;\n- show: (leftPosition, topPosition) => void;\n+ show: (leftPosition?: number, topPosition?: number) => void;\n hide: () => void;\n [x: string]: any;\n }\n@@ -96,12 +96,13 @@ const MiniMap: MiniMapPlugin = {\n const miniMapContainer = document.createElement('div');\n const miniMapWrap = MiniMap.__miniMapWrap;\n miniMapContainer.appendChild(miniMapWrap);\n- miniMapContainer.style.left = `${left}px`;\n- miniMapContainer.style.top = `${top}px`;\n+ if (typeof left !== 'undefined' && typeof top !== 'undefined') {\n+ miniMapContainer.style.left = `${left}px`;\n+ miniMapContainer.style.top = `${top}px`;\n+ }\n miniMapContainer.style.position = 'absolute';\n miniMapContainer.className = 'lf-mini-map';\n MiniMap.__container.appendChild(miniMapContainer);\n- \n MiniMap.__miniMapWrap.appendChild(MiniMap.__viewport);\n \n const header = document.createElement('div');\n"]
| 1 |
["4c97625774f65ed3d59caefc5c691fabf0adc499"]
|
["feat"]
|
dashboard removed unused code
|
["diff --git a/api/chalicelib/core/dashboard.py b/api/chalicelib/core/dashboard.py\nindex e5bc800..1afa105 100644\n--- a/api/chalicelib/core/dashboard.py\n+++ b/api/chalicelib/core/dashboard.py\n@@ -126,13 +126,6 @@ SESSIONS_META_FIELDS = {\"revId\": \"rev_id\",\n \"browser\": \"user_browser\"}\n \n \n-def __get_domains_errors_neutral(rows):\n- neutral = {l: 0 for l in [i for k in [list(v.keys()) for v in rows] for i in k]}\n- if len(neutral.keys()) == 0:\n- neutral = {\"All\": 0}\n- return neutral\n-\n-\n def get_processed_sessions(project_id, startTimestamp=TimeUTC.now(delta_days=-1),\n endTimestamp=TimeUTC.now(),\n density=7, **args):\n"]
| 1 |
["b7b1d2b315443e1854403c8fe8f871c4632b5d31"]
|
["refactor"]
|
document the use of export buckets for large pre-aggregations
Co-authored-by: Ray Paik <ray@cube.dev>
Co-authored-by: Artyom Keydunov <artyom@cube.dev>
Co-authored-by: Dmitry Patsura <talk@dmtry.me>
|
["diff --git a/docs/content/Caching/Using-Pre-Aggregations.md b/docs/content/Caching/Using-Pre-Aggregations.md\nindex 7882a25..a927241 100644\n--- a/docs/content/Caching/Using-Pre-Aggregations.md\n+++ b/docs/content/Caching/Using-Pre-Aggregations.md\n@@ -65,8 +65,8 @@ In development mode, Cube.js enables background refresh by default and will\n refresh all pre-aggregations marked with the\n [`scheduledRefresh`](/pre-aggregations#scheduled-refresh) parameter.\n \n-Please consult the [Production Checklist][ref-production-checklist-refresh] for\n-best practices on running background refresh in production environments.\n+Please consult the [Production Checklist][ref-prod-list-refresh] for best\n+practices on running background refresh in production environments.\n \n ```js\n cube(`Orders`, {\n@@ -193,10 +193,20 @@ CUBEJS_EXT_DB_TYPE=<SUPPORTED_DB_TYPE_HERE>\n \n <!-- prettier-ignore-start -->\n [[warning |]]\n-| Please be aware of the limitations when using internal and external (outside of Cube Store) pre-aggregations.\n+| Please be aware of the limitations when using internal and external (outside\n+| of Cube Store) pre-aggregations.\n <!-- prettier-ignore-end -->\n \n-\n+<div\n+ style=\"text-align: center\"\n+>\n+ <img\n+ alt=\"Internal vs External vs External with Cube Store diagram\"\n+ src=\"https://raw.githubusercontent.com/cube-js/cube.js/master/docs/content/Caching/pre-aggregations.png\"\n+ style=\"border: none\"\n+ width=\"100%\"\n+ />\n+</div>\n \n #### Some known limitations when using Postgres/MySQL as a storage layer listed below.\n \n@@ -245,15 +255,75 @@ slow to return results.\n (such as AWS Athena and BigQuery). Repeatedly querying for this data can easily\n rack up costs.\n \n+## Optimizing Pre-Aggregation Build Times\n+\n+<!-- prettier-ignore-start -->\n+[[info | ]]\n+| For ideal performance, pre-aggregations should be built using a dedicated\n+| Refresh Worker. [See here for more details][ref-prod-list-refresh].\n+<!-- prettier-ignore-end -->\n+\n+By default, Cube.js will use the source database as a temporary staging area for\n+writing pre-aggregations to determine column types. The data is loaded back into\n+memory before writing them to Cube Store (or an external database).\n+\n+\n+\n+If the dataset is large (more than 100k rows), then Cube.js can face issues when\n+the Node runtime runs out of memory.\n+\n+### Batching\n+\n+Batching is a more performant strategy where Cube.js sends compressed CSVs for\n+Cube Store to ingest.\n+\n+\n+\n+The performance scales to the amount of memory available on the Cube.js\n+instance. Support is currently available for:\n+\n+- [AWS Athena][ref-connect-db-athena] (coming soon)\n+- [AWS Redshift][ref-connect-db-redshift]\n+- [BigQuery][ref-connect-db-bigquery]\n+- [MySQL][ref-connect-db-mysql]\n+- [Postgres][ref-connect-db-postgres]\n+\n+### Export bucket\n+\n+When dealing with larger pre-aggregations (more than 100k rows), performance can\n+be significantly improved by using an export bucket. 
This allows the source\n+database to persist data directly into cloud storage, which is then loaded into\n+Cube Store in parallel:\n+\n+\n+\n+Export buckets are currently supported for the following databases:\n+\n+- [AWS Athena][ref-connect-db-athena] (coming soon)\n+- [AWS Redshift][ref-connect-db-redshift]\n+- [BigQuery][ref-connect-db-bigquery]\n+- [Snowflake][ref-connect-db-snowflake]\n+\n+When using cloud storage, it is important to correctly configure any data\n+retention policies to clean up the data in the export bucket as Cube.js does not\n+currently manage this. For most use-cases, 1 day is sufficient.\n+\n [wiki-partitioning]: https://en.wikipedia.org/wiki/Partition_(database)\n+[ref-config-connect-db]: /connecting-to-the-database\n+[ref-config-env]: /reference/environment-variables#cube-store\n+[ref-connect-db-athena]: /connecting-to-the-database#notes-aws-athena\n+[ref-connect-db-redshift]: /connecting-to-the-database#notes-aws-redshift\n+[ref-connect-db-bigquery]: /connecting-to-the-database#notes-google-big-query\n+[ref-connect-db-mysql]: /connecting-to-the-database#notes-my-sql\n+[ref-connect-db-postgres]: /connecting-to-the-database#notes-aws-rds-postgres\n+[ref-connect-db-snowflake]: /connecting-to-the-database#notes-snowflake\n [ref-schema-timedimension]: /types-and-formats#dimensions-types-time\n [ref-preaggs]: /pre-aggregations\n [ref-preagg-sched-refresh]: /pre-aggregations#scheduled-refresh\n [ref-preagg-time-part]: /pre-aggregations#rollup-time-partitioning\n [ref-preagg-segment-part]: /pre-aggregations#rollup-segment-partitioning\n [ref-preaggs-refresh-key]: /pre-aggregations#refresh-key\n+[ref-prod-list-refresh]: /deployment/production-checklist#set-up-refresh-worker\n [ref-config-extdbtype]: /config#options-reference-external-db-type\n [ref-config-driverfactory]: /config#options-reference-driver-factory\n [ref-config-extdriverfactory]: /config#options-reference-external-driver-factory\n-[ref-production-checklist-refresh]:\n- /deployment/production-checklist#set-up-refresh-worker\ndiff --git a/docs/content/Caching/build-batching.png b/docs/content/Caching/build-batching.png\nnew file mode 100755\nindex 0000000..d1e28b3\nBinary files /dev/null and b/docs/content/Caching/build-batching.png differ\ndiff --git a/docs/content/Caching/build-export-bucket.png b/docs/content/Caching/build-export-bucket.png\nnew file mode 100755\nindex 0000000..7da2425\nBinary files /dev/null and b/docs/content/Caching/build-export-bucket.png differ\ndiff --git a/docs/content/Caching/build-regular.png b/docs/content/Caching/build-regular.png\nnew file mode 100644\nindex 0000000..af4c3a2\nBinary files /dev/null and b/docs/content/Caching/build-regular.png differ\ndiff --git a/docs/content/Configuration/Connecting-to-the-Database.md b/docs/content/Configuration/Connecting-to-the-Database.md\nindex 321518f..a16ccc4 100644\n--- a/docs/content/Configuration/Connecting-to-the-Database.md\n+++ b/docs/content/Configuration/Connecting-to-the-Database.md\n@@ -49,20 +49,21 @@ CUBEJS_API_SECRET=secret\n The table below shows which environment variables are used for different\n databases:\n \n-| Database | Credentials |\n-| ------------------------------------------------------ | 
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n-| PostgreSQL, MySQL, AWS Redshift, Hive/SparkSQL, Oracle | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n-| MS SQL | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_DOMAIN` |\n-| ClickHouse | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_CLICKHOUSE_READONLY` |\n-| AWS Athena | `CUBEJS_AWS_KEY`, `CUBEJS_AWS_SECRET`, `CUBEJS_AWS_REGION`, `CUBEJS_AWS_S3_OUTPUT_LOCATION` |\n-| Google BigQuery | `CUBEJS_DB_BQ_PROJECT_ID`, `CUBEJS_DB_BQ_KEY_FILE or CUBEJS_DB_BQ_CREDENTIALS`, `CUBEJS_DB_BQ_LOCATION`, `CUBEJS_DB_BQ_EXPORT_BUCKET` |\n-| MongoDB | `CUBEJS_DB_HOST`, `CUBEJS_DB_NAME`, `CUBEJS_DB_PORT`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_SSL_CA`, `CUBEJS_DB_SSL_CERT`, `CUBEJS_DB_SSL_CIPHERS`, `CUBEJS_DB_SSL_PASSPHRASE` |\n-| Snowflake | `CUBEJS_DB_SNOWFLAKE_ACCOUNT`, `CUBEJS_DB_SNOWFLAKE_REGION`, `CUBEJS_DB_SNOWFLAKE_WAREHOUSE`, `CUBEJS_DB_SNOWFLAKE_ROLE`, `CUBEJS_DB_SNOWFLAKE_CLIENT_SESSION_KEEP_ALIVE`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SNOWFLAKE_AUTHENTICATOR`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PATH`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS` |\n-| Presto | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_CATALOG`, `CUBEJS_DB_SCHEMA`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n-| Druid | `CUBEJS_DB_URL`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL` |\n-| SQLite | `CUBEJS_DB_NAME` |\n-| Databricks | `CUBEJS_DB_NAME`, `CUBEJS_DB_DATABRICKS_URL` |\n-| Elasticsearch | `CUBEJS_DB_URL`, `CUBEJS_DB_ELASTIC_QUERY_FORMAT`,`CUBEJS_DB_ELASTIC_OPENDISTRO` ,`CUBEJS_DB_ELASTIC_APIKEY_ID`,`CUBEJS_DB_ELASTIC_APIKEY_KEY` |\n+| Database | Credentials |\n+| ---------------------------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |\n+| PostgreSQL, MySQL, Hive/SparkSQL, Oracle | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n+| AWS Redshift | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, |\n+| MS SQL | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_DOMAIN` |\n+| ClickHouse | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_CLICKHOUSE_READONLY` |\n+| AWS Athena | `CUBEJS_AWS_KEY`, `CUBEJS_AWS_SECRET`, `CUBEJS_AWS_REGION`, `CUBEJS_AWS_S3_OUTPUT_LOCATION` |\n+| Google BigQuery | `CUBEJS_DB_BQ_PROJECT_ID`, `CUBEJS_DB_BQ_KEY_FILE or CUBEJS_DB_BQ_CREDENTIALS`, `CUBEJS_DB_BQ_LOCATION`, |\n+| MongoDB | `CUBEJS_DB_HOST`, `CUBEJS_DB_NAME`, `CUBEJS_DB_PORT`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL`, `CUBEJS_DB_SSL_CA`, `CUBEJS_DB_SSL_CERT`, `CUBEJS_DB_SSL_CIPHERS`, `CUBEJS_DB_SSL_PASSPHRASE` |\n+| Snowflake | `CUBEJS_DB_SNOWFLAKE_ACCOUNT`, 
`CUBEJS_DB_SNOWFLAKE_REGION`, `CUBEJS_DB_SNOWFLAKE_WAREHOUSE`, `CUBEJS_DB_SNOWFLAKE_ROLE`, `CUBEJS_DB_SNOWFLAKE_CLIENT_SESSION_KEEP_ALIVE`, `CUBEJS_DB_NAME`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SNOWFLAKE_AUTHENTICATOR`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PATH`, `CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS` |\n+| Presto | `CUBEJS_DB_HOST`, `CUBEJS_DB_PORT`, `CUBEJS_DB_CATALOG`, `CUBEJS_DB_SCHEMA`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS` |\n+| Druid | `CUBEJS_DB_URL`, `CUBEJS_DB_USER`, `CUBEJS_DB_PASS`, `CUBEJS_DB_SSL` |\n+| SQLite | `CUBEJS_DB_NAME` |\n+| Databricks | `CUBEJS_DB_NAME`, `CUBEJS_DB_DATABRICKS_URL` |\n+| Elasticsearch | `CUBEJS_DB_URL`, `CUBEJS_DB_ELASTIC_QUERY_FORMAT`,`CUBEJS_DB_ELASTIC_OPENDISTRO` ,`CUBEJS_DB_ELASTIC_APIKEY_ID`,`CUBEJS_DB_ELASTIC_APIKEY_KEY` |\n \n ## Multiple Databases\n \n@@ -195,18 +196,25 @@ You can learn more about acquiring Google BigQuery credentials\n [here][link-bigquery-getting-started] and [here][link-bigquery-credentials].\n \n You can set the dataset location using the `CUBEJS_DB_BQ_LOCATION` environment\n-variable.\n+variable. All supported regions [can be found\n+here][link-bigquery-regional-locations].\n \n ```dotenv\n CUBEJS_DB_BQ_LOCATION=us-central1\n ```\n \n-You can find more supported regions [here][link-bigquery-regional-locations].\n+#### Configuring an export bucket\n \n-If your pre-aggregations dataset is too big to fit in memory, we **strongly**\n-recommend configuring `CUBEJS_DB_BQ_EXPORT_BUCKET`. This will allow Cube.js to\n-materialize results on an \"export\" bucket which are then loaded into BigQuery,\n-providing better performance.\n+<!-- prettier-ignore-start -->\n+[[warning |]]\n+| BigQuery only supports using Google Cloud Storage for export buckets.\n+<!-- prettier-ignore-end -->\n+\n+##### Google Cloud Storage\n+\n+For [improved pre-aggregation performance with large\n+datasets][ref-caching-large-preaggs], enable the export bucket functionality by\n+configuring Cube.js with the following environment variables:\n \n <!-- prettier-ignore-start -->\n [[info |]]\n@@ -216,7 +224,8 @@ providing better performance.\n <!-- prettier-ignore-end -->\n \n ```dotenv\n-CUBEJS_DB_BQ_EXPORT_BUCKET=export_data_58148478376\n+CUBEJS_DB_EXPORT_BUCKET=export_data_58148478376\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=gcp\n ```\n \n ### MSSQL\n@@ -279,6 +288,73 @@ To connect to a Elasticsearch database, use `CUBEJS_DB_URL` with the username\n and password embedded in the URL, if required. 
If you're not using Elastic\n Cloud, you **must** specify `CUBEJS_DB_ELASTIC_QUERY_FORMAT`.\n \n+### AWS Redshift\n+\n+#### Configuring an export bucket\n+\n+<!-- prettier-ignore-start -->\n+[[warning |]]\n+| AWS Redshift only supports using AWS S3 for export buckets.\n+<!-- prettier-ignore-end -->\n+\n+##### AWS S3\n+\n+For [improved pre-aggregation performance with large\n+datasets][ref-caching-large-preaggs], enable the export bucket functionality by\n+configuring Cube.js with the following environment variables:\n+\n+<!-- prettier-ignore-start -->\n+[[info |]]\n+| Ensure the AWS credentials are correctly configured in IAM to allow reads and\n+| writes to the export bucket.\n+<!-- prettier-ignore-end -->\n+\n+```dotenv\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=s3\n+CUBEJS_DB_EXPORT_BUCKET=my.bucket.on.s3\n+CUBEJS_DB_EXPORT_BUCKET_AWS_KEY=<AWS_KEY>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=<AWS_SECRET>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=<AWS_REGION>\n+```\n+\n+### Snowflake\n+\n+#### Configuring an export bucket\n+\n+Snowflake supports using both AWS S3 and Google Cloud Storage for export bucket\n+functionality.\n+\n+##### AWS S3\n+\n+<!-- prettier-ignore-start -->\n+[[info |]]\n+| Ensure the AWS credentials are correctly configured in IAM to allow reads and\n+| writes to the export bucket.\n+<!-- prettier-ignore-end -->\n+\n+```dotenv\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=s3\n+CUBEJS_DB_EXPORT_BUCKET=my.bucket.on.s3\n+CUBEJS_DB_EXPORT_BUCKET_AWS_KEY=<AWS_KEY>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET=<AWS_SECRET>\n+CUBEJS_DB_EXPORT_BUCKET_AWS_REGION=<AWS_REGION>\n+```\n+\n+##### Google Cloud Storage\n+\n+Before configuring Cube.js, an [integration must be created and configured in\n+Snowflake][link-snowflake-gcs-integration]. Take note of the integration name\n+(`gcs_int` from the example link) as you'll need it to configure Cube.js.\n+\n+Once the Snowflake integration is set up, configure Cube.js using the following:\n+\n+```dotenv\n+CUBEJS_DB_EXPORT_BUCKET=snowflake-export-bucket\n+CUBEJS_DB_EXPORT_BUCKET_TYPE=gcp\n+CUBEJS_DB_EXPORT_GCS_CREDENTIALS=<BASE64_ENCODED_SERVICE_CREDENTIALS_JSON\n+CUBEJS_DB_EXPORT_INTEGRATION=gcs_int\n+```\n+\n [link-java-guide]:\n https://github.com/cube-js/cube.js/blob/master/packages/cubejs-jdbc-driver/README.md#java-installation\n [link-cubejs-driver-guide]:\n@@ -300,8 +376,11 @@ Cloud, you **must** specify `CUBEJS_DB_ELASTIC_QUERY_FORMAT`.\n https://console.cloud.google.com/apis/credentials/serviceaccountkey\n [link-heroku-postgres-issue]:\n https://help.heroku.com/3DELT3RK/why-can-t-my-third-party-utility-connect-to-heroku-postgres-with-ssl\n+[link-snowflake-gcs-integration]:\n+ https://docs.snowflake.com/en/user-guide/data-load-gcs-config.html\n+[link-bigquery-regional-locations]:\n+ https://cloud.google.com/bigquery/docs/locations#regional-locations\n [ref-cubejs-cli]: /using-the-cubejs-cli\n [ref-enabling-ssl]: #enabling-ssl\n [ref-env-var]: /reference/environment-variables#database-connection\n-[link-bigquery-regional-locations]:\n- https://cloud.google.com/bigquery/docs/locations#regional-locations\n+[ref-caching-large-preaggs]: /using-pre-aggregations#large-pre-aggregations\ndiff --git a/docs/content/Configuration/Environment-Variables-Reference.md b/docs/content/Configuration/Environment-Variables-Reference.md\nindex 692d2c7..6888697 100644\n--- a/docs/content/Configuration/Environment-Variables-Reference.md\n+++ b/docs/content/Configuration/Environment-Variables-Reference.md\n@@ -124,6 +124,18 @@ databases [in this guide][link-connecting-to-db].\n | 
`CUBEJS_DB_SNOWFLAKE_PRIVATE_KEY_PASS` | Snowflake | The password for the private RSA key. Only required for encrypted keys | A valid password for the encrypted private RSA key |\n | `CUBEJS_DB_DATABRICKS_URL` | Databricks | The URL for a JDBC connection | A valid JDBC URL |\n \n+## Export Bucket\n+\n+| Environment variable | Description | Possible Values |\n+| ------------------------------------ | -------------------------------------------------------------------------------------------- | ---------------------------------------------------------------- |\n+| `CUBEJS_DB_EXPORT_BUCKET` | The name of a bucket in cloud storage | `exports-20210505` |\n+| `CUBEJS_DB_EXPORT_BUCKET_TYPE` | The cloud provider where the bucket is hosted | `gcs`, `s3` |\n+| `CUBEJS_DB_EXPORT_BUCKET_AWS_KEY` | The AWS Access Key ID to use for the export bucket | A valid AWS Access Key ID |\n+| `CUBEJS_DB_EXPORT_BUCKET_AWS_SECRET` | The AWS Secret Access Key to use for the export bucket | A valid AWS Secret Access Key |\n+| `CUBEJS_DB_EXPORT_BUCKET_AWS_REGION` | The AWS region of the export bucket | [A valid AWS region][link-aws-regions] |\n+| `CUBEJS_DB_EXPORT_GCS_CREDENTIALS` | A Base64 encoded JSON key file for connecting to Google Cloud | A valid Google Cloud JSON key file encoded as a Base64 string |\n+| `CUBEJS_DB_EXPORT_INTEGRATION` | The name of the integration used in the database. Only required when using Snowflake and GCS | A valid string matching the name of the integration in Snowflake |\n+\n ## Cube Store\n \n | Environment variable | Description | Possible Values |\n"]
| 1 |
["81f37be838d5e3af738908b1bcbf59fea2b45989"]
|
["docs"]
|
serialize access to StreamObserver
|
["diff --git a/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java b/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\nindex ae2b1c0..8ed64e5 100644\n--- a/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\n+++ b/gateway/src/main/java/io/camunda/zeebe/gateway/impl/stream/ClientStreamAdapter.java\n@@ -22,6 +22,7 @@ import io.camunda.zeebe.transport.stream.api.ClientStreamer;\n import io.camunda.zeebe.util.VisibleForTesting;\n import io.grpc.Status;\n import io.grpc.StatusRuntimeException;\n+import io.grpc.internal.SerializingExecutor;\n import io.grpc.stub.ServerCallStreamObserver;\n import io.grpc.stub.StreamObserver;\n import java.util.concurrent.CompletableFuture;\n@@ -83,12 +84,12 @@ public class ClientStreamAdapter {\n @VisibleForTesting(\"Allow unit testing behavior job handling behavior\")\n static final class ClientStreamConsumerImpl implements ClientStreamConsumer {\n private final StreamObserver<ActivatedJob> responseObserver;\n- private final Executor executor;\n+ private final SerializingExecutor executor;\n \n public ClientStreamConsumerImpl(\n final StreamObserver<ActivatedJob> responseObserver, final Executor executor) {\n this.responseObserver = responseObserver;\n- this.executor = executor;\n+ this.executor = new SerializingExecutor(executor);\n }\n \n @Override\n"]
| 1 |
["22044d58302513f5cf22b06151c4a367bbb88f6e"]
|
["fix"]
|
methods for scanning headers
|
["diff --git a/src/chainstate/stacks/db/headers.rs b/src/chainstate/stacks/db/headers.rs\nindex a12362d..91eb580 100644\n--- a/src/chainstate/stacks/db/headers.rs\n+++ b/src/chainstate/stacks/db/headers.rs\n@@ -31,8 +31,8 @@ use crate::core::FIRST_BURNCHAIN_CONSENSUS_HASH;\n use crate::core::FIRST_STACKS_BLOCK_HASH;\n use crate::util_lib::db::Error as db_error;\n use crate::util_lib::db::{\n- query_count, query_row, query_row_columns, query_row_panic, query_rows, DBConn, FromColumn,\n- FromRow,\n+ query_count, query_row, query_row_columns, query_row_panic, query_rows, u64_to_sql, DBConn,\n+ FromColumn, FromRow,\n };\n use clarity::vm::costs::ExecutionCost;\n \n@@ -360,4 +360,23 @@ impl StacksChainState {\n }\n Ok(ret)\n }\n+\n+ /// Get all headers at a given Stacks height\n+ pub fn get_all_headers_at_height(\n+ conn: &Connection,\n+ height: u64,\n+ ) -> Result<Vec<StacksHeaderInfo>, Error> {\n+ let qry =\n+ \"SELECT * FROM block_headers WHERE block_height = ?1 ORDER BY burn_header_height DESC\";\n+ let args: &[&dyn ToSql] = &[&u64_to_sql(height)?];\n+ query_rows(conn, qry, args).map_err(|e| e.into())\n+ }\n+\n+ /// Get the highest known header height\n+ pub fn get_max_header_height(conn: &Connection) -> Result<u64, Error> {\n+ let qry = \"SELECT block_height FROM block_headers ORDER BY block_height DESC LIMIT 1\";\n+ query_row(conn, qry, NO_PARAMS)\n+ .map(|row_opt: Option<i64>| row_opt.map(|h| h as u64).unwrap_or(0))\n+ .map_err(|e| e.into())\n+ }\n }\n"]
| 1 |
["6a63a9d439e18b6b8483abdf19162f476fcf8563"]
|
["feat"]
|
add tenant property to ProcessMessageSubscriptionRecord
Adds the tenant id property + getter/setter to the record.
|
["diff --git a/protocol-impl/src/main/java/io/camunda/zeebe/protocol/impl/record/value/message/ProcessMessageSubscriptionRecord.java b/protocol-impl/src/main/java/io/camunda/zeebe/protocol/impl/record/value/message/ProcessMessageSubscriptionRecord.java\nindex 0b07e49..790a974 100644\n--- a/protocol-impl/src/main/java/io/camunda/zeebe/protocol/impl/record/value/message/ProcessMessageSubscriptionRecord.java\n+++ b/protocol-impl/src/main/java/io/camunda/zeebe/protocol/impl/record/value/message/ProcessMessageSubscriptionRecord.java\n@@ -37,6 +37,8 @@ public final class ProcessMessageSubscriptionRecord extends UnifiedRecordValue\n private final BooleanProperty interruptingProp = new BooleanProperty(\"interrupting\", true);\n private final StringProperty correlationKeyProp = new StringProperty(\"correlationKey\", \"\");\n private final StringProperty elementIdProp = new StringProperty(\"elementId\", \"\");\n+ private final StringProperty tenantIdProp =\n+ new StringProperty(\"tenantId\", TenantOwned.DEFAULT_TENANT_IDENTIFIER);\n \n public ProcessMessageSubscriptionRecord() {\n declareProperty(subscriptionPartitionIdProp)\n@@ -48,7 +50,8 @@ public final class ProcessMessageSubscriptionRecord extends UnifiedRecordValue\n .declareProperty(interruptingProp)\n .declareProperty(bpmnProcessIdProp)\n .declareProperty(correlationKeyProp)\n- .declareProperty(elementIdProp);\n+ .declareProperty(elementIdProp)\n+ .declareProperty(tenantIdProp);\n }\n \n public void wrap(final ProcessMessageSubscriptionRecord record) {\n@@ -62,6 +65,7 @@ public final class ProcessMessageSubscriptionRecord extends UnifiedRecordValue\n setBpmnProcessId(record.getBpmnProcessIdBuffer());\n setCorrelationKey(record.getCorrelationKeyBuffer());\n setElementId(record.getElementIdBuffer());\n+ setTenantId(record.getTenantId());\n }\n \n @JsonIgnore\n@@ -191,7 +195,11 @@ public final class ProcessMessageSubscriptionRecord extends UnifiedRecordValue\n \n @Override\n public String getTenantId() {\n- // todo(#13289): replace dummy implementation\n- return TenantOwned.DEFAULT_TENANT_IDENTIFIER;\n+ return bufferAsString(tenantIdProp.getValue());\n+ }\n+\n+ public ProcessMessageSubscriptionRecord setTenantId(final String tenantId) {\n+ tenantIdProp.setValue(tenantId);\n+ return this;\n }\n }\n"]
| 1 |
["497a7d0676564774c18435e082e7de9132a251c7"]
|
["feat"]
|
Introduce timediff fn (stub)
|
["diff --git a/rust/Cargo.lock b/rust/Cargo.lock\nindex b42616f..4795eb6 100644\n--- a/rust/Cargo.lock\n+++ b/rust/Cargo.lock\n@@ -1287,7 +1287,7 @@ dependencies = [\n [[package]]\n name = \"datafusion\"\n version = \"5.1.0\"\n-source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=8df4132b83d896a0d3db5c82a4eaaa3eaa285d15#8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\"\n+source = \"git+https://github.com/cube-js/arrow-datafusion.git?rev=868f3c4de13d13cda84cee33475b9782b94fa60c#868f3c4de13d13cda84cee33475b9782b94fa60c\"\n dependencies = [\n \"ahash 0.7.4\",\n \"arrow 6.0.0\",\ndiff --git a/rust/cubesql/Cargo.toml b/rust/cubesql/Cargo.toml\nindex 3cb386a..9aef494 100644\n--- a/rust/cubesql/Cargo.toml\n+++ b/rust/cubesql/Cargo.toml\n@@ -9,7 +9,7 @@ documentation = \"https://cube.dev/docs\"\n homepage = \"https://cube.dev\"\n \n [dependencies]\n-datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = \"8df4132b83d896a0d3db5c82a4eaaa3eaa285d15\", default-features = false, features = [\"unicode_expressions\"] }\n+datafusion = { git = 'https://github.com/cube-js/arrow-datafusion.git', rev = \"868f3c4de13d13cda84cee33475b9782b94fa60c\", default-features = false, features = [\"unicode_expressions\"] }\n anyhow = \"1.0\"\n thiserror = \"1.0\"\n cubeclient = { path = \"../cubeclient\" }\ndiff --git a/rust/cubesql/src/compile/engine/df/intervals.rs b/rust/cubesql/src/compile/engine/df/intervals.rs\nnew file mode 100644\nindex 0000000..9e6cb7e\n--- /dev/null\n+++ b/rust/cubesql/src/compile/engine/df/intervals.rs\n@@ -0,0 +1,51 @@\n+#[macro_export]\n+macro_rules! make_string_interval_year_month {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let interval = $array.value($row) as f64;\n+ let years = (interval / 12_f64).floor();\n+ let month = interval - (years * 12_f64);\n+\n+ format!(\n+ \"{} years {} mons 0 days 0 hours 0 mins 0.00 secs\",\n+ years, month,\n+ )\n+ };\n+\n+ s\n+ }};\n+}\n+\n+#[macro_export]\n+macro_rules! 
make_string_interval_day_time {\n+ ($array: ident, $row: ident) => {{\n+ let s = if $array.is_null($row) {\n+ \"NULL\".to_string()\n+ } else {\n+ let value: u64 = $array.value($row) as u64;\n+\n+ let days_parts: i32 = ((value & 0xFFFFFFFF00000000) >> 32) as i32;\n+ let milliseconds_part: i32 = (value & 0xFFFFFFFF) as i32;\n+\n+ let secs = milliseconds_part / 1000;\n+ let mins = secs / 60;\n+ let hours = mins / 60;\n+\n+ let secs = secs - (mins * 60);\n+ let mins = mins - (hours * 60);\n+\n+ format!(\n+ \"0 years 0 mons {} days {} hours {} mins {}.{:02} secs\",\n+ days_parts,\n+ hours,\n+ mins,\n+ secs,\n+ (milliseconds_part % 1000),\n+ )\n+ };\n+\n+ s\n+ }};\n+}\ndiff --git a/rust/cubesql/src/compile/engine/df/mod.rs b/rust/cubesql/src/compile/engine/df/mod.rs\nindex a19a970..3097523 100644\n--- a/rust/cubesql/src/compile/engine/df/mod.rs\n+++ b/rust/cubesql/src/compile/engine/df/mod.rs\n@@ -1 +1,2 @@\n pub mod coerce;\n+pub mod intervals;\ndiff --git a/rust/cubesql/src/compile/engine/udf.rs b/rust/cubesql/src/compile/engine/udf.rs\nindex 55b8bc1..0e160b3 100644\n--- a/rust/cubesql/src/compile/engine/udf.rs\n+++ b/rust/cubesql/src/compile/engine/udf.rs\n@@ -1,14 +1,19 @@\n use std::any::type_name;\n use std::sync::Arc;\n \n+\n use datafusion::{\n arrow::{\n array::{\n ArrayRef, BooleanArray, BooleanBuilder, GenericStringArray, Int32Builder,\n- PrimitiveArray, StringBuilder, UInt32Builder,\n+ IntervalDayTimeBuilder, PrimitiveArray, StringBuilder,\n+ UInt32Builder,\n },\n compute::cast,\n- datatypes::{DataType, Int64Type},\n+ datatypes::{\n+ DataType, Int64Type, IntervalUnit, TimeUnit,\n+ TimestampNanosecondType,\n+ },\n },\n error::DataFusionError,\n logical_plan::create_udf,\n@@ -399,3 +404,63 @@ pub fn create_convert_tz_udf() -> ScalarUDF {\n &fun,\n )\n }\n+\n+pub fn create_timediff_udf() -> ScalarUDF {\n+ let fun = make_scalar_function(move |args: &[ArrayRef]| {\n+ assert!(args.len() == 2);\n+\n+ let left_dt = &args[0];\n+ let right_dt = &args[1];\n+\n+ let left_date = match left_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(left_dt, \"left_dt\", TimestampNanosecondType);\n+ let ts = arr.value(0);\n+\n+ // NaiveDateTime::from_timestamp(ts, 0)\n+ ts\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"left_dt argument must be a Timestamp, actual: {}\",\n+ left_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let right_date = match right_dt.data_type() {\n+ DataType::Timestamp(TimeUnit::Nanosecond, _) => {\n+ let arr = downcast_primitive_arg!(right_dt, \"right_dt\", TimestampNanosecondType);\n+ arr.value(0)\n+ }\n+ _ => {\n+ return Err(DataFusionError::Execution(format!(\n+ \"right_dt argument must be a Timestamp, actual: {}\",\n+ right_dt.data_type()\n+ )));\n+ }\n+ };\n+\n+ let diff = right_date - left_date;\n+ if diff != 0 {\n+ return Err(DataFusionError::NotImplemented(format!(\n+ \"timediff is not implemented, it's stub\"\n+ )));\n+ }\n+\n+ let mut interal_arr = IntervalDayTimeBuilder::new(1);\n+ interal_arr.append_value(diff)?;\n+\n+ Ok(Arc::new(interal_arr.finish()) as ArrayRef)\n+ });\n+\n+ let return_type: ReturnTypeFunction =\n+ Arc::new(move |_| Ok(Arc::new(DataType::Interval(IntervalUnit::DayTime))));\n+\n+ ScalarUDF::new(\n+ \"timediff\",\n+ &Signature::any(2, Volatility::Immutable),\n+ &return_type,\n+ &fun,\n+ )\n+}\ndiff --git a/rust/cubesql/src/compile/mod.rs b/rust/cubesql/src/compile/mod.rs\nindex a88da57..6121aa0 100644\n--- a/rust/cubesql/src/compile/mod.rs\n+++ b/rust/cubesql/src/compile/mod.rs\n@@ 
-32,8 +32,8 @@ use self::engine::context::SystemVar;\n use self::engine::provider::CubeContext;\n use self::engine::udf::{\n create_connection_id_udf, create_convert_tz_udf, create_current_user_udf, create_db_udf,\n- create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_user_udf,\n- create_version_udf,\n+ create_if_udf, create_instr_udf, create_isnull_udf, create_least_udf, create_timediff_udf,\n+ create_user_udf, create_version_udf,\n };\n use self::parser::parse_sql_to_statement;\n \n@@ -1450,6 +1450,7 @@ impl QueryPlanner {\n ctx.register_udf(create_if_udf());\n ctx.register_udf(create_least_udf());\n ctx.register_udf(create_convert_tz_udf());\n+ ctx.register_udf(create_timediff_udf());\n \n let state = ctx.state.lock().unwrap().clone();\n let cube_ctx = CubeContext::new(&state, &self.context.cubes);\n@@ -3226,6 +3227,25 @@ mod tests {\n }\n \n #[tokio::test]\n+ async fn test_timediff() -> Result<(), CubeError> {\n+ assert_eq!(\n+ execute_df_query(\n+ \"select \\\n+ timediff('1994-11-26T13:25:00.000Z'::timestamp, '1994-11-26T13:25:00.000Z'::timestamp) as r1\n+ \".to_string()\n+ )\n+ .await?,\n+ \"+------------------------------------------------+\\n\\\n+ | r1 |\\n\\\n+ +------------------------------------------------+\\n\\\n+ | 0 years 0 mons 0 days 0 hours 0 mins 0.00 secs |\\n\\\n+ +------------------------------------------------+\"\n+ );\n+\n+ Ok(())\n+ }\n+\n+ #[tokio::test]\n async fn test_metabase() -> Result<(), CubeError> {\n assert_eq!(\n execute_df_query(\ndiff --git a/rust/cubesql/src/mysql/dataframe.rs b/rust/cubesql/src/mysql/dataframe.rs\nindex fa246aa..2443458 100644\n--- a/rust/cubesql/src/mysql/dataframe.rs\n+++ b/rust/cubesql/src/mysql/dataframe.rs\n@@ -3,9 +3,10 @@ use std::fmt::{self, Debug, Formatter};\n use chrono::{SecondsFormat, TimeZone, Utc};\n use comfy_table::{Cell, Table};\n use datafusion::arrow::array::{\n- Array, Float64Array, Int32Array, Int64Array, StringArray, TimestampMicrosecondArray,\n- UInt32Array,\n+ Array, Float64Array, Int32Array, Int64Array, IntervalDayTimeArray, IntervalYearMonthArray,\n+ StringArray, TimestampMicrosecondArray, UInt32Array,\n };\n+use datafusion::arrow::datatypes::IntervalUnit;\n use datafusion::arrow::{\n array::{BooleanArray, TimestampNanosecondArray, UInt64Array},\n datatypes::{DataType, TimeUnit},\n@@ -15,6 +16,7 @@ use log::{error, warn};\n use msql_srv::{ColumnFlags, ColumnType};\n \n use crate::{compile::builder::CompiledQueryFieldMeta, CubeError};\n+use crate::{make_string_interval_day_time, make_string_interval_year_month};\n \n #[derive(Clone, Debug)]\n pub struct Column {\n@@ -309,6 +311,7 @@ pub fn arrow_to_column_type(arrow_type: DataType) -> Result<ColumnType, CubeErro\n DataType::Binary => Ok(ColumnType::MYSQL_TYPE_BLOB),\n DataType::Utf8 | DataType::LargeUtf8 => Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Timestamp(_, _) => Ok(ColumnType::MYSQL_TYPE_STRING),\n+ DataType::Interval(_) => Ok(ColumnType::MYSQL_TYPE_STRING),\n DataType::Float16 | DataType::Float64 => Ok(ColumnType::MYSQL_TYPE_DOUBLE),\n DataType::Boolean => Ok(ColumnType::MYSQL_TYPE_TINY),\n DataType::Int8\n@@ -402,6 +405,24 @@ pub fn batch_to_dataframe(batches: &Vec<RecordBatch>) -> Result<DataFrame, CubeE\n });\n }\n }\n+ DataType::Interval(IntervalUnit::DayTime) => {\n+ let a = array\n+ .as_any()\n+ .downcast_ref::<IntervalDayTimeArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_day_time!(a, i)));\n+ }\n+ }\n+ DataType::Interval(IntervalUnit::YearMonth) => {\n+ let 
a = array\n+ .as_any()\n+ .downcast_ref::<IntervalYearMonthArray>()\n+ .unwrap();\n+ for i in 0..num_rows {\n+ rows[i].push(TableValue::String(make_string_interval_year_month!(a, i)));\n+ }\n+ }\n DataType::Boolean => {\n let a = array.as_any().downcast_ref::<BooleanArray>().unwrap();\n for i in 0..num_rows {\n"]
| 1 |
["29dfb9716298c5a579c0ffba6742e13a29325670"]
|
["feat"]
|
refactor generate_completion
|
["diff --git a/src/lib.rs b/src/lib.rs\nindex dfd8014..15850f7 100644\n--- a/src/lib.rs\n+++ b/src/lib.rs\n@@ -1,11 +1,106 @@\n //! Generates [Nushell](https://github.com/nushell/nushell) completions for [`clap`](https://github.com/clap-rs/clap) based CLIs\n \n-use clap::Command;\n+use clap::{Arg, Command};\n use clap_complete::Generator;\n \n /// Generate Nushell complete file\n pub struct Nushell;\n \n+enum Argument {\n+ Short(char),\n+ Long(String),\n+ ShortAndLong(char, String),\n+ Positional(String, bool),\n+}\n+\n+struct ArgumentLine {\n+ arg: Argument,\n+ takes_values: bool,\n+ help: Option<String>,\n+}\n+\n+impl From<&Arg> for ArgumentLine {\n+ fn from(arg: &Arg) -> Self {\n+ let takes_values = arg\n+ .get_num_args()\n+ .map(|v| v.takes_values())\n+ .unwrap_or(false);\n+\n+ let help = arg.get_help().map(|s| s.to_string());\n+\n+ if arg.is_positional() {\n+ let id = arg.get_id().to_string();\n+ let required = arg.is_required_set();\n+ let arg = Argument::Positional(id, required);\n+\n+ return Self {\n+ arg,\n+ takes_values,\n+ help,\n+ };\n+ }\n+\n+ let short = arg.get_short();\n+ let long = arg.get_long();\n+\n+ match short {\n+ Some(short) => match long {\n+ Some(long) => Self {\n+ arg: Argument::ShortAndLong(short, long.into()),\n+ takes_values,\n+ help,\n+ },\n+ None => Self {\n+ arg: Argument::Short(short),\n+ takes_values,\n+ help,\n+ },\n+ },\n+ None => match long {\n+ Some(long) => Self {\n+ arg: Argument::Long(long.into()),\n+ takes_values,\n+ help,\n+ },\n+ None => unreachable!(\"No short or long option found\"),\n+ },\n+ }\n+ }\n+}\n+\n+impl ToString for ArgumentLine {\n+ fn to_string(&self) -> String {\n+ let mut s = String::new();\n+\n+ match &self.arg {\n+ Argument::Short(short) => s.push_str(format!(\" -{}\", short).as_str()),\n+ Argument::Long(long) => s.push_str(format!(\" --{}\", long).as_str()),\n+ Argument::ShortAndLong(short, long) => {\n+ s.push_str(format!(\" --{}(-{})\", long, short).as_str())\n+ }\n+ Argument::Positional(positional, required) => {\n+ s.push_str(format!(\" {}\", positional).as_str());\n+\n+ if !*required {\n+ s.push('?');\n+ }\n+ }\n+ }\n+\n+ if self.takes_values {\n+ s.push_str(\": string\");\n+ }\n+\n+ if let Some(help) = &self.help {\n+ s.push_str(format!(\"\\t# {}\", help).as_str());\n+ }\n+\n+ s.push('\\n');\n+\n+ s\n+ }\n+}\n+\n impl Generator for Nushell {\n fn file_name(&self, name: &str) -> String {\n format!(\"{}.nu\", name)\n@@ -37,51 +132,18 @@ fn generate_completion(completions: &mut String, cmd: &Command, is_subcommand: b\n \n let bin_name = cmd.get_bin_name().expect(\"Failed to get bin name\");\n \n- if is_subcommand {\n- completions.push_str(format!(\" export extern \\\"{}\\\" [\\n\", bin_name).as_str());\n+ let name = if is_subcommand {\n+ format!(r#\"\"{}\"\"#, bin_name)\n } else {\n- completions.push_str(format!(\" export extern {} [\\n\", bin_name).as_str());\n- }\n+ bin_name.into()\n+ };\n \n- let mut s = String::new();\n- for arg in cmd.get_arguments() {\n- if arg.is_positional() {\n- s.push_str(format!(\" {}\", arg.get_id()).as_str());\n- if !arg.is_required_set() {\n- s.push('?');\n- }\n- }\n-\n- let long = arg.get_long();\n- if let Some(opt) = long {\n- s.push_str(format!(\" --{}\", opt).as_str());\n- }\n+ completions.push_str(format!(\" export extern {} [\\n\", name).as_str());\n \n- let short = arg.get_short();\n- if let Some(opt) = short {\n- if long.is_some() {\n- s.push_str(format!(\"(-{})\", opt).as_str());\n- } else {\n- s.push_str(format!(\" -{}\", opt).as_str());\n- }\n- }\n-\n- if let Some(v) = 
arg.get_num_args() {\n- if v.takes_values() {\n- // TODO: add more types?\n- // TODO: add possible values?\n- s.push_str(\": string\");\n- }\n- }\n-\n- if let Some(msg) = arg.get_help() {\n- if arg.is_positional() || long.is_some() || short.is_some() {\n- s.push_str(format!(\"\\t# {}\", msg).as_str());\n- }\n- }\n-\n- s.push('\\n');\n- }\n+ let s: String = cmd\n+ .get_arguments()\n+ .map(|arg| ArgumentLine::from(arg).to_string())\n+ .collect();\n \n completions.push_str(&s);\n completions.push_str(\" ]\\n\\n\");\n"]
| 1 |
["f1bc5a554af4e617c7d7508f7f16f8fd25c78c91"]
|
["refactor"]
|
pass absolute burnchain block height to pox sync watchdog so we correctly infer ibd status
|
["diff --git a/testnet/stacks-node/src/run_loop/neon.rs b/testnet/stacks-node/src/run_loop/neon.rs\nindex 677749b..dc4a7bd 100644\n--- a/testnet/stacks-node/src/run_loop/neon.rs\n+++ b/testnet/stacks-node/src/run_loop/neon.rs\n@@ -411,7 +411,6 @@ impl RunLoop {\n \n let mut burnchain_height = sortition_db_height;\n let mut num_sortitions_in_last_cycle = 1;\n- let mut learned_burnchain_height = false;\n \n // prepare to fetch the first reward cycle!\n target_burnchain_block_height = burnchain_height + pox_constants.reward_cycle_length as u64;\n@@ -439,18 +438,16 @@ impl RunLoop {\n break;\n }\n \n+ let remote_chain_height = burnchain.get_headers_height();\n+\n // wait for the p2p state-machine to do at least one pass\n- debug!(\"Wait until we reach steady-state before processing more burnchain blocks...\");\n+ debug!(\"Wait until we reach steady-state before processing more burnchain blocks (chain height is {}, we are at {})...\", remote_chain_height, burnchain_height);\n \n // wait until it's okay to process the next sortitions\n let ibd = match pox_watchdog.pox_sync_wait(\n &burnchain_config,\n &burnchain_tip,\n- if learned_burnchain_height {\n- Some(burnchain_height)\n- } else {\n- None\n- },\n+ Some(remote_chain_height),\n num_sortitions_in_last_cycle,\n ) {\n Ok(ibd) => ibd,\n@@ -478,7 +475,6 @@ impl RunLoop {\n };\n \n // *now* we know the burnchain height\n- learned_burnchain_height = true;\n burnchain_tip = next_burnchain_tip;\n burnchain_height = cmp::min(burnchain_height + 1, target_burnchain_block_height);\n \n"]
| 1 |
["5b70e008c57efc89da4061f9adb7d0491b2ea644"]
|
["fix"]
|
cue linter: include all CUE files
Signed-off-by: Andrea Luzzardi <aluzzardi@gmail.com>
|
["diff --git a/ci/cue/lint.cue b/ci/cue/lint.cue\nindex cdda698..6aac265 100644\n--- a/ci/cue/lint.cue\n+++ b/ci/cue/lint.cue\n@@ -39,7 +39,7 @@ import (\n \t\t\t// CACHE: copy only *.cue files\n \t\t\tdocker.#Copy & {\n \t\t\t\tcontents: source\n-\t\t\t\tinclude: [\"*.cue\"]\n+\t\t\t\tinclude: [\"*.cue\", \"**/*.cue\"]\n \t\t\t\tdest: \"/cue\"\n \t\t\t},\n \n"]
| 1 |
["4c44543a3d9eea37e90a2316717feb01c0e0d83a"]
|
["cicd"]
|
fix the contact icon in the e2e test
references #6364
|
["diff --git a/ionic/components/toolbar/test/colors/main.html b/ionic/components/toolbar/test/colors/main.html\nindex 24e48ca..73fe306 100644\n--- a/ionic/components/toolbar/test/colors/main.html\n+++ b/ionic/components/toolbar/test/colors/main.html\n@@ -9,8 +9,8 @@\n <ion-icon name=\"menu\"></ion-icon>\n </button>\n <ion-buttons start>\n- <button>\n- <ion-icon ios=\"contact\"></ion-icon>\n+ <button showWhen=\"ios\">\n+ <ion-icon name=\"contact\"></ion-icon>\n </button>\n <button>\n <ion-icon name=\"search\"></ion-icon>\n@@ -27,8 +27,8 @@\n \n <ion-toolbar primary>\n <ion-buttons start>\n- <button>\n- <ion-icon ios=\"contact\"></ion-icon>\n+ <button showWhen=\"ios\">\n+ <ion-icon name=\"contact\"></ion-icon>\n </button>\n <button>\n <ion-icon name=\"search\"></ion-icon>\n@@ -45,8 +45,8 @@\n \n <ion-toolbar primary>\n <ion-buttons start>\n- <button class=\"activated\">\n- <ion-icon ios=\"contact\"></ion-icon>\n+ <button cla showWhen=\"ios\"ss=\"activated\">\n+ <ion-icon name=\"contact\"></ion-icon>\n </button>\n <button class=\"activated\">\n <ion-icon name=\"search\"></ion-icon>\n"]
| 1 |
["90d4c1d5bcc9f2dce6e1da0cb953f04f46fb1380"]
|
["test"]
|
fixed tick interval
|
["diff --git a/backend/services/integrations/main.go b/backend/services/integrations/main.go\nindex 4a5e764..35c3ff2 100644\n--- a/backend/services/integrations/main.go\n+++ b/backend/services/integrations/main.go\n@@ -54,7 +54,7 @@ func main() {\n \tsigchan := make(chan os.Signal, 1)\n \tsignal.Notify(sigchan, syscall.SIGINT, syscall.SIGTERM)\n \n-\ttick := time.Tick(intervals.INTEGRATIONS_REQUEST_INTERVAL)\n+\ttick := time.Tick(intervals.INTEGRATIONS_REQUEST_INTERVAL * time.Millisecond)\n \n \tlog.Printf(\"Integration service started\\n\")\n \tmanager.RequestAll()\n@@ -66,7 +66,7 @@ func main() {\n \t\t\tpg.Close()\n \t\t\tos.Exit(0)\n \t\tcase <-tick:\n-\t\t\t// log.Printf(\"Requesting all...\\n\")\n+\t\t\tlog.Printf(\"Requesting all...\\n\")\n \t\t\tmanager.RequestAll()\n \t\tcase event := <-manager.Events:\n \t\t\t// log.Printf(\"New integration event: %v\\n\", *event.RawErrorEvent)\n"]
| 1 |
["7dc3b70fe40fc7de255a28bb3098bcb8c0d35365"]
|
["fix"]
|
add clean up test
Add another clean up test, which verifies that the state is cleaned up
after the timer (non-recurring) is triggered.
|
["diff --git a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\nindex d36b4c9..ca5047f 100644\n--- a/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n+++ b/engine/src/test/java/io/camunda/zeebe/engine/state/ProcessExecutionCleanStateTest.java\n@@ -630,6 +630,40 @@ public final class ProcessExecutionCleanStateTest {\n }\n \n @Test\n+ public void testProcessWithTriggerTimerStartEvent() {\n+ // given\n+ final var deployment =\n+ engineRule\n+ .deployment()\n+ .withXmlResource(\n+ Bpmn.createExecutableProcess(PROCESS_ID)\n+ .startEvent()\n+ .timerWithDate(\"=now() + duration(\\\"PT15S\\\")\")\n+ .endEvent()\n+ .done())\n+ .deploy();\n+\n+ final var processDefinitionKey =\n+ deployment.getValue().getProcessesMetadata().get(0).getProcessDefinitionKey();\n+\n+ // when\n+ engineRule.awaitProcessingOf(\n+ RecordingExporter.timerRecords(TimerIntent.CREATED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .getFirst());\n+\n+ engineRule.increaseTime(Duration.ofSeconds(15));\n+\n+ RecordingExporter.processInstanceRecords(ProcessInstanceIntent.ELEMENT_COMPLETED)\n+ .withProcessDefinitionKey(processDefinitionKey)\n+ .withElementType(BpmnElementType.PROCESS)\n+ .await();\n+\n+ // then\n+ assertThatStateIsEmpty();\n+ }\n+\n+ @Test\n public void testProcessWithTimerStartEventRedeployment() {\n // given\n final var deployment =\n"]
| 1 |
["aa746b764e6c54bbbd631210fce35df842d09b12"]
|
["test"]
|
improve test stability
* improve test stability by waiting until the message subscription is opened. Message subscriptions are opened outside of the context of the stream processor. Sometimes this may take a while.
* enable running the tests repeatably by fixing the engine rule
|
["diff --git a/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java b/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java\nindex 0c539b9..ffaead1 100644\n--- a/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java\n+++ b/engine/src/test/java/io/zeebe/engine/processing/bpmn/subprocess/InterruptingEventSubprocessTest.java\n@@ -334,22 +334,31 @@ public class InterruptingEventSubprocessTest {\n \"timer-event-subprocess\",\n s -> s.startEvent(\"other-timer\").timerWithDuration(\"P1D\").endEvent());\n \n- final long wfInstanceKey = createInstanceAndTriggerEvent(workflow(eventSubprocess));\n+ final long wfInstanceKey = createInstanceAndWaitForTask(workflow(eventSubprocess));\n+\n+ RecordingExporter.messageSubscriptionRecords(MessageSubscriptionIntent.OPENED)\n+ .withWorkflowInstanceKey(wfInstanceKey)\n+ .withMessageName(\"other-message\")\n+ .await();\n+\n+ triggerEventSubprocess.accept(wfInstanceKey);\n \n // then\n assertThat(\n- RecordingExporter.messageSubscriptionRecords()\n+ RecordingExporter.records()\n+ .limitToWorkflowInstance(wfInstanceKey)\n+ .messageSubscriptionRecords()\n .withWorkflowInstanceKey(wfInstanceKey)\n- .withMessageName(\"other-message\")\n- .limit(4))\n+ .withMessageName(\"other-message\"))\n .extracting(Record::getIntent)\n .contains(MessageSubscriptionIntent.CLOSED);\n \n assertThat(\n- RecordingExporter.timerRecords()\n+ RecordingExporter.records()\n+ .limitToWorkflowInstance(wfInstanceKey)\n+ .timerRecords()\n .withWorkflowInstanceKey(wfInstanceKey)\n- .withHandlerNodeId(\"other-timer\")\n- .limit(4))\n+ .withHandlerNodeId(\"other-timer\"))\n .extracting(Record::getIntent)\n .contains(TimerIntent.CANCELED);\n }\ndiff --git a/engine/src/test/java/io/zeebe/engine/util/EngineRule.java b/engine/src/test/java/io/zeebe/engine/util/EngineRule.java\nindex 8576be5..50040f4 100644\n--- a/engine/src/test/java/io/zeebe/engine/util/EngineRule.java\n+++ b/engine/src/test/java/io/zeebe/engine/util/EngineRule.java\n@@ -71,7 +71,7 @@ public final class EngineRule extends ExternalResource {\n \n private static final int PARTITION_ID = Protocol.DEPLOYMENT_PARTITION;\n private static final RecordingExporter RECORDING_EXPORTER = new RecordingExporter();\n- private StreamProcessorRule environmentRule;\n+ private final StreamProcessorRule environmentRule;\n private final RecordingExporterTestWatcher recordingExporterTestWatcher =\n new RecordingExporterTestWatcher();\n private final int partitionCount;\n@@ -80,7 +80,7 @@ public final class EngineRule extends ExternalResource {\n \n private final Int2ObjectHashMap<SubscriptionCommandMessageHandler> subscriptionHandlers =\n new Int2ObjectHashMap<>();\n- private final ExecutorService subscriptionHandlerExecutor = Executors.newSingleThreadExecutor();\n+ private ExecutorService subscriptionHandlerExecutor;\n \n private EngineRule(final int partitionCount) {\n this(partitionCount, false);\n@@ -115,6 +115,8 @@ public final class EngineRule extends ExternalResource {\n \n @Override\n protected void before() {\n+ subscriptionHandlerExecutor = Executors.newSingleThreadExecutor();\n+\n if (!explicitStart) {\n startProcessors();\n }\n@@ -123,7 +125,6 @@ public final class EngineRule extends ExternalResource {\n @Override\n protected void after() {\n subscriptionHandlerExecutor.shutdown();\n- environmentRule = null;\n subscriptionHandlers.clear();\n }\n \ndiff --git 
a/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java b/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java\nindex 0f3da21..af6c50e 100755\n--- a/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java\n+++ b/engine/src/test/java/io/zeebe/engine/util/StreamProcessorRule.java\n@@ -248,6 +248,7 @@ public final class StreamProcessorRule implements TestRule {\n @Override\n protected void after() {\n streams = null;\n+ streamProcessingComposite = null;\n }\n }\n \ndiff --git a/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java b/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java\nindex 293df93..a3ede18 100644\n--- a/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java\n+++ b/test-util/src/main/java/io/zeebe/test/util/record/RecordStream.java\n@@ -81,4 +81,9 @@ public final class RecordStream extends ExporterRecordStream<RecordValue, Record\n return new IncidentRecordStream(\n filter(r -> r.getValueType() == ValueType.INCIDENT).map(Record.class::cast));\n }\n+\n+ public MessageSubscriptionRecordStream messageSubscriptionRecords() {\n+ return new MessageSubscriptionRecordStream(\n+ filter(r -> r.getValueType() == ValueType.MESSAGE_SUBSCRIPTION).map(Record.class::cast));\n+ }\n }\n"]
| 1 |
["00be00f2dd0ba7e4bfa4f5dfb74135761f5f86ec"]
|
["test"]
|
switch QA to new testbench-1.x-prod
In order to use the new Testbench that is compatible with Zeebe 1.x
versions, this switches the client id and secrets used by the QA stage.
|
["diff --git a/Jenkinsfile b/Jenkinsfile\nindex 176ab58..bead402 100644\n--- a/Jenkinsfile\n+++ b/Jenkinsfile\n@@ -326,7 +326,7 @@ pipeline {\n TAG = \"${env.VERSION}-${env.GIT_COMMIT}\"\n DOCKER_GCR = credentials(\"zeebe-gcr-serviceaccount-json\")\n ZEEBE_AUTHORIZATION_SERVER_URL = 'https://login.cloud.ultrawombat.com/oauth/token'\n- ZEEBE_CLIENT_ID = 'W5a4JUc3I1NIetNnodo3YTvdsRIFb12w'\n+ ZEEBE_CLIENT_ID = 'ELL8eP0qDkl6dxXVps0t51x2VkCkWf~p'\n QA_RUN_VARIABLES = \"{\\\"zeebeImage\\\": \\\"${env.IMAGE}:${env.TAG}\\\", \\\"generationTemplate\\\": \\\"${params.GENERATION_TEMPLATE}\\\", \" +\n \"\\\"channel\\\": \\\"Internal Dev\\\", \\\"branch\\\": \\\"${env.BRANCH_NAME}\\\", \\\"build\\\": \\\"${currentBuild.absoluteUrl}\\\", \" +\n \"\\\"businessKey\\\": \\\"${currentBuild.absoluteUrl}\\\", \\\"processId\\\": \\\"qa-protocol\\\"}\"\n@@ -341,7 +341,7 @@ pipeline {\n withVault(\n [vaultSecrets:\n [\n- [path : 'secret/common/ci-zeebe/testbench-secrets-int',\n+ [path : 'secret/common/ci-zeebe/testbench-secrets-1.x-prod',\n secretValues:\n [\n [envVar: 'ZEEBE_CLIENT_SECRET', vaultKey: 'clientSecret'],\n"]
| 1 |
["c81a0c2999454c859b4bf4da5779712960d239be"]
|
["cicd"]
|
set Opensearch version to 2.5.0
We use Opensearch 2.5.0 in our dependencies. This is tied to the
Opensearch versions of other component of the platform.
This Docker compose file is only used for local testing. Let's make sure
we test on the actual version we support.
|
["diff --git a/exporters/opensearch-exporter/docker-compose.yml b/exporters/opensearch-exporter/docker-compose.yml\nindex 8fe84b3..647afa1 100644\n--- a/exporters/opensearch-exporter/docker-compose.yml\n+++ b/exporters/opensearch-exporter/docker-compose.yml\n@@ -2,7 +2,7 @@ version: '3'\n \n services:\n opensearch:\n- image: opensearchproject/opensearch:2.6.0\n+ image: opensearchproject/opensearch:2.5.0\n ports:\n - \"9200:9200\"\n - \"9600:9600\"\n@@ -14,7 +14,7 @@ services:\n - opensearch-net\n \n opensearch-dashboards:\n- image: opensearchproject/opensearch-dashboards:2.6.0\n+ image: opensearchproject/opensearch-dashboards:2.5.0\n ports:\n - \"5601:5601\"\n expose:\n"]
| 1 |
["b7beb4d8cf19bbb7b72997a8276300a786e4fb5e"]
|
["build"]
|
await job creation to ensure asserted event sequence
|
["diff --git a/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java b/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java\nindex 9ffa1fa..4333db0 100644\n--- a/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java\n+++ b/engine/src/test/java/io/zeebe/engine/processor/workflow/boundary/BoundaryEventTest.java\n@@ -114,12 +114,18 @@ public class BoundaryEventTest {\n ENGINE.deployment().withXmlResource(MULTIPLE_SEQUENCE_FLOWS).deploy();\n final long workflowInstanceKey = ENGINE.workflowInstance().ofBpmnProcessId(PROCESS_ID).create();\n \n- // when\n RecordingExporter.timerRecords()\n .withHandlerNodeId(\"timer\")\n .withIntent(TimerIntent.CREATED)\n .withWorkflowInstanceKey(workflowInstanceKey)\n .getFirst();\n+\n+ RecordingExporter.jobRecords(JobIntent.CREATED)\n+ .withType(\"type\")\n+ .withWorkflowInstanceKey(workflowInstanceKey)\n+ .getFirst();\n+\n+ // when\n ENGINE.increaseTime(Duration.ofMinutes(1));\n \n // then\n"]
| 1 |
["a8d1a60fd48d3fbd76d4271987a1b0f538d498f1"]
|
["test"]
|
do not check mkdocs for older versions used in deployments
|
["diff --git a/.github/workflows/ibis-docs-lint.yml b/.github/workflows/ibis-docs-lint.yml\nindex 57d94a4..04de03b 100644\n--- a/.github/workflows/ibis-docs-lint.yml\n+++ b/.github/workflows/ibis-docs-lint.yml\n@@ -206,7 +206,7 @@ jobs:\n - name: build and push dev docs\n run: |\n nix develop --ignore-environment -c \\\n- mkdocs gh-deploy --message 'docs: ibis@${{ github.sha }}'\n+ mkdocs gh-deploy --message 'docs: ibis@${{ github.sha }}' --ignore-version\n \n simulate_release:\n runs-on: ubuntu-latest\n"]
| 1 |
["21228c55b7045d9b2225f65e6231184ff332b071"]
|
["cicd"]
|
support document.html
|
["diff --git a/packages/nuxt3/src/builder/builder.ts b/packages/nuxt3/src/builder/builder.ts\nindex a24bd88..ecc22ef 100644\n--- a/packages/nuxt3/src/builder/builder.ts\n+++ b/packages/nuxt3/src/builder/builder.ts\n@@ -3,6 +3,7 @@ import fsExtra from 'fs-extra'\n import { debounce } from 'lodash'\n import { BundleBuilder } from 'src/webpack'\n import { Nuxt } from '../core'\n+import { DeterminedGlobals, determineGlobals } from '../utils'\n import {\n templateData,\n compileTemplates,\n@@ -15,12 +16,14 @@ import Ignore from './ignore'\n \n export class Builder {\n nuxt: Nuxt\n+ globals: DeterminedGlobals\n ignore: Ignore\n- app: NuxtApp\n templates: NuxtTemplate[]\n+ app: NuxtApp\n \n constructor (nuxt) {\n this.nuxt = nuxt\n+ this.globals = determineGlobals(nuxt.options.globalName, nuxt.options.globals)\n this.ignore = new Ignore({\n rootDir: nuxt.options.srcDir,\n ignoreArray: nuxt.options.ignore.concat(\n@@ -32,6 +35,10 @@ export class Builder {\n build () {\n return build(this)\n }\n+\n+ close () {\n+ // TODO: close watchers\n+ }\n }\n \n // Extends VueRouter\ndiff --git a/packages/nuxt3/src/builder/template.ts b/packages/nuxt3/src/builder/template.ts\nindex 63a9115..fe09f16 100644\n--- a/packages/nuxt3/src/builder/template.ts\n+++ b/packages/nuxt3/src/builder/template.ts\n@@ -11,6 +11,7 @@ export interface NuxtTemplate {\n \n export function templateData (builder) {\n return {\n+ globals: builder.globals,\n app: builder.app\n }\n }\ndiff --git a/packages/nuxt3/src/builder/watch.ts b/packages/nuxt3/src/builder/watch.ts\nindex b4d1415..d148fec 100644\n--- a/packages/nuxt3/src/builder/watch.ts\n+++ b/packages/nuxt3/src/builder/watch.ts\n@@ -38,7 +38,8 @@ export function createWatcher (\n return {\n watchAll,\n watch,\n- debug\n+ debug,\n+ close: () => watcher.close()\n }\n }\n \ndiff --git a/packages/nuxt3/src/config/options.ts b/packages/nuxt3/src/config/options.ts\nindex 5aac8ac..6e7f93c 100644\n--- a/packages/nuxt3/src/config/options.ts\n+++ b/packages/nuxt3/src/config/options.ts\n@@ -12,7 +12,7 @@ import { DefaultConfiguration, defaultNuxtConfigFile, getDefaultNuxtConfig } fro\n import { deleteProp, mergeConfigs, setProp, overrideProp, Optional } from './transformers'\n \n interface InputConfiguration {\n- appTemplatePath?: string\n+ documentPath?: string\n layoutTransition?: string | DefaultConfiguration['layoutTransition']\n loading?: true | false | DefaultConfiguration['loading']\n manifest?: {\n@@ -197,13 +197,16 @@ function normalizeConfig (_options: CliConfiguration) {\n .concat(options.extensions))\n \n // If app.html is defined, set the template path to the user template\n- if (options.appTemplatePath === undefined) {\n- options.appTemplatePath = path.resolve(options.buildDir, 'views/app.template.html')\n- if (fs.existsSync(path.join(options.srcDir, 'app.html'))) {\n- options.appTemplatePath = path.join(options.srcDir, 'app.html')\n+ if (options.documentPath === undefined) {\n+ options.documentPath = path.resolve(options.buildDir, 'views/document.template.html')\n+ const userDocumentPath = path.join(options.srcDir, 'document.html')\n+ if (fs.existsSync(userDocumentPath)) {\n+ options.documentPath = userDocumentPath\n+ } else {\n+ options.watch.push(userDocumentPath)\n }\n } else {\n- options.appTemplatePath = path.resolve(options.srcDir, options.appTemplatePath)\n+ options.documentPath = path.resolve(options.srcDir, options.documentPath)\n }\n \n overrideProp(options.build, 'publicPath', options.build.publicPath.replace(/([^/])$/, '$1/'))\ndiff --git 
a/packages/nuxt3/src/vue-renderer/renderers/ssr.ts b/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\nindex 3e3ce2d..482bd6b 100644\n--- a/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\n+++ b/packages/nuxt3/src/vue-renderer/renderers/ssr.ts\n@@ -96,6 +96,9 @@ export default class SSRRenderer extends BaseRenderer {\n // Call Vue renderer renderToString\n let APP = await this.vueRenderer.renderToString(renderContext)\n \n+ // Wrap with Nuxt id\n+ APP = `<div id=\"${this.serverContext.globals.id}\">${APP}</div>`\n+\n // Call render:done in app\n await renderContext.nuxt.hooks.callHook('vue-renderer:done')\n \ndiff --git a/packages/nuxt3/src/webpack/configs/client.ts b/packages/nuxt3/src/webpack/configs/client.ts\nindex a257948..4fb35e0 100644\n--- a/packages/nuxt3/src/webpack/configs/client.ts\n+++ b/packages/nuxt3/src/webpack/configs/client.ts\n@@ -94,7 +94,7 @@ function clientHTML (ctx: WebpackConfigContext) {\n config.plugins.push(\n new HTMLPlugin({\n filename: '../server/index.ssr.html',\n- template: options.appTemplatePath,\n+ template: options.documentPath,\n minify: options.build.html.minify as any,\n inject: false // Resources will be injected using bundleRenderer\n })\n@@ -104,7 +104,7 @@ function clientHTML (ctx: WebpackConfigContext) {\n config.plugins.push(\n new HTMLPlugin({\n filename: '../server/index.spa.html',\n- template: options.appTemplatePath,\n+ template: options.documentPath,\n minify: options.build.html.minify as any,\n inject: true\n })\n"]
| 1 |
["09476134eeeb12c025618919ab9a795a680a9b30"]
|
["feat"]
|
return Animated nodes passed to "getAnimated"
...instead of undefined.
Also, stop using instanceof in "isAnimated" for perf.
|
["diff --git a/packages/animated/src/Animated.ts b/packages/animated/src/Animated.ts\nindex 00daa96..05ff7f9 100644\n--- a/packages/animated/src/Animated.ts\n+++ b/packages/animated/src/Animated.ts\n@@ -4,7 +4,7 @@ import { AnimatedValue } from './AnimatedValue'\n const $node: any = Symbol.for('Animated:node')\n \n export const isAnimated = (value: any): value is Animated =>\n- value instanceof Animated\n+ !!value && value[$node] === value\n \n /** Get the owner's `Animated` node. */\n export const getAnimated = (owner: any): Animated | undefined =>\n@@ -23,6 +23,10 @@ export abstract class Animated<T = any> {\n protected payload?: Payload\n \n /** Returns every value of the node. Pass true for only the animated values. */\n+ constructor() {\n+ setAnimated(this, this)\n+ }\n+\n abstract getValue(animated?: boolean): T\n \n abstract setValue(value: T): void\n"]
| 1 |
["eb513f7eeea7865f15e5bd561a471d1f4381ea70"]
|
["fix"]
|
add missing region to cloudformation_stack_set
|
["diff --git a/internal/providers/terraform/aws/cloudformation_stack_set.go b/internal/providers/terraform/aws/cloudformation_stack_set.go\nindex 6720caa..e752b79 100644\n--- a/internal/providers/terraform/aws/cloudformation_stack_set.go\n+++ b/internal/providers/terraform/aws/cloudformation_stack_set.go\n@@ -12,7 +12,7 @@ func getCloudFormationStackSetRegistryItem() *schema.RegistryItem {\n \t}\n }\n func NewCloudformationStackSet(d *schema.ResourceData, u *schema.UsageData) *schema.Resource {\n-\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address)}\n+\tr := &aws.CloudformationStackSet{Address: strPtr(d.Address), Region: strPtr(d.Get(\"region\").String())}\n \tif !d.IsEmpty(\"template_body\") {\n \t\tr.TemplateBody = strPtr(d.Get(\"template_body\").String())\n \t}\n"]
| 1 |
["304d0588f634e9e72087a706367c53af9c7f7180"]
|
["fix"]
|
fix pagination spacing
|
["diff --git a/website/layouts/Base.tsx b/website/layouts/Base.tsx\nindex 22d36a2..40f7130 100644\n--- a/website/layouts/Base.tsx\n+++ b/website/layouts/Base.tsx\n@@ -399,7 +399,7 @@ export function Base({ children, headings }: BaseProps) {\n >\n <a className=\"flex items-center space-x-4 group\">\n <ArrowLeftIcon className=\"h-4 transition-transform duration-100 ease-in-out transform group-hover:-translate-x-1\" />\n- <div className=\"flex flex-col space-x-1\">\n+ <div className=\"flex flex-col space-y-1\">\n <span className=\"text-sm text-gray-500 transition-colors duration-100 ease-in-out group-hover:text-gray-700\">\n Previous\n </span>\n@@ -418,7 +418,7 @@ export function Base({ children, headings }: BaseProps) {\n aria-label={`Go to ${next.resource?.label}`}\n >\n <a className=\"flex items-center space-x-4 group\">\n- <div className=\"flex flex-col space-x-1\">\n+ <div className=\"flex flex-col space-y-1\">\n <span className=\"text-sm text-gray-500 transition-colors duration-100 ease-in-out group-hover:text-gray-700\">\n Next\n </span>\n"]
| 1 |
["1e05a24486f15889ddf6bf1c711ea2bbffc1a88e"]
|
["fix"]
|