images: listlengths 1, 1
texts: listlengths 1, 1
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C++_>// Copyright (c) 2012-2013 The Bitcoin Core developers\n// Distributed under the MIT software license, see the accompanying\n// file COPYING or http://www.opensource.org/licenses/mit-license.php.\n#include \"addrman.h\"\n#include \"test/test_domo.h\"\n#include <string>\n#include <boost/test/unit_test.hpp>\n#include <crypto/common.h> // for ReadLE64\n\n#include \"hash.h\"\n#include \"random.h\"\n\nclass CAddrManTest : public CAddrMan\n{\n uint64_t state;\n\npublic:\n CAddrManTest()\n {\n state = 1;\n }\n\n //! Ensure that bucket placement is always the same for testing purposes.\n void MakeDeterministic()\n {\n nKey.SetNull();\n insecure_rand = FastRandomContext(true);\n }\n\n int RandomInt(int nMax)\n {\n state = ReadLE64((CHashWriter(SER_GETHASH, 0) << state).GetHash().begin());\n return (unsigned int)(state % nMax);\n }\n\n CAddrInfo* Find(const CNetAddr& addr, int* pnId = NULL)\n {\n return CAddrMan::Find(addr, pnId);\n }\n\n CAddrInfo* Create(const CAddress& addr, const CNetAddr& addrSource, int* pnId = NULL)\n {\n return CAddrMan::Create(addr, addrSource, pnId);\n }\n\n void Delete(int nId)\n {\n CAddrMan::Delete(nId);\n }\n};\n\nBOOST_FIXTURE_TEST_SUITE(addrman_tests, BasicTestingSetup)\n\nBOOST_AUTO_TEST_CASE(addrman_simple)\n{\n CAddrManTest addrman;\n\n // Set addrman addr placement to be deterministic.\n addrman.MakeDeterministic();\n\n CNetAddr source = CNetAddr(\"252.2.2.2\");\n\n // Test 1: Does Addrman respond correctly when empty.\n BOOST_CHECK(addrman.size() == 0);\n CAddrInfo addr_null = addrman.Select();\n BOOST_CHECK(addr_null.ToString() == \"[::]:0\");\n\n // Test 2: Does Addrman::Add work as expected.\n CService addr1 = CService(\"250.1.1.1\", 8333);\n addrman.Add(CAddress(addr1), source);\n BOOST_CHECK(addrman.size() == 1);\n CAddrInfo addr_ret1 = addrman.Select();\n BOOST_CHECK(addr_ret1.ToString() == \"250.1.1.1:8333\");\n\n // Test 3: Does IP address deduplication work correctly.\n // Expected dup IP should not be added.\n CService addr1_dup = CService(\"250.1.1.1\", 8333);\n addrman.Add(CAddress(addr1_dup), source);\n BOOST_CHECK(addrman.size() == 1);\n\n\n // Test 5: New table has one addr and we add a diff addr we should\n // have two addrs.\n CService addr2 = CService(\"250.1.1.2\", 8333);\n addrman.Add(CAddress(addr2), source);\n BOOST_CHECK(addrman.size() == 2);\n\n // Test 6: AddrMan::Clear() should empty the new table.\n addrman.Clear();\n BOOST_CHECK(addrman.size() == 0);\n CAddrInfo addr_null2 = addrman.Select();\n BOOST_CHECK(addr_null2.ToString() == \"[::]:0\");\n}\n\nBOOST_AUTO_TEST_CASE(addrman_ports)\n{\n CAddrManTest addrman;\n\n // Set addrman addr placement to be deterministic.\n addrman.MakeDeterministic();\n\n CNetAddr source = CNetAddr(\"252.2.2.2\");\n\n BOOST_CHECK(addrman.size() == 0);\n\n // Test 7; Addr with same IP but diff port does not replace existing addr.\n CService addr1 = CService(\"250.1.1.1\", 8333);\n addrman.Add(CAddress(addr1), source);\n BOOST_CHECK(addrman.size() == 1);\n\n CService addr1_port = CService(\"250.1.1.1\", 8334);\n addrman.Add(CAddress(addr1_port), source);\n BOOST_CHECK(addrman.size() == 1);\n CAddrInfo addr_ret2 = addrman.Select();\n BOOST_CHECK(addr_ret2.ToString() == \"250.1.1.1:8333\");\n\n // Test 8: Add same IP but diff port to tried table, it doesn't get added.\n // Perhaps this is not ideal behavior but it is the current behavior.\n addrman.Good(CAddress(addr1_port));\n BOOST_CHECK(addrman.size() == 1);\n bool newOnly = true;\n CAddrInfo addr_ret3 
= addrman.Select(newOnly);\n BOOST_CHECK(addr_ret3.ToString() == \"250.1.1.1:8333\");\n}\n\n\nBOOST_AUTO_TEST_CASE(addrman_select)\n{\n CAddrManTest addrman;\n\n // Set addrman addr placement to be deterministic.\n addrman.MakeDeterministic();\n\n CNetAddr source = CNetAddr(\"252.2.2.2\");\n\n // Test 9: Select from new with 1 addr in new.\n CService addr1 = CService(\"250.1.1.1\", 8333);\n addrman.Add(CAddress(addr1), source);\n BOOST_CHECK(addrman.size() == 1);\n\n bool newOnly = true;\n CAddrInfo addr_ret1 = addrman.Select(newOnly);\n BOOST_CHECK(addr_ret1.ToString() == \"250.1.1.1:8333\");\n\n // Test 10: move addr to tried, select from new expected nothing returned.\n addrman.Good(CAddress(addr1));\n BOOST_CHECK(addrman.size() == 1);\n CAddrInfo addr_ret2 = addrman.Select(newOnly);\n BOOST_CHECK(addr_ret2.ToString() == \"[::]:0\");\n\n CAddrInfo addr_ret3 = addrman.Select();\n BOOST_CHECK(addr_ret3.ToString() == \"250.1.1.1:8333\");\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C++_>// Copyright 2014 Samsung Electronics. All rights reserved.\n// Use of this source code is governed by a BSD-style license that can be\n// found in the LICENSE file.\n\n#include \"utc_blink_ewk_base.h\"\n\nclass utc_blink_ewk_back_forward_list_item_unref : public utc_blink_ewk_base\n{\nprotected:\n void LoadFinished(Evas_Object *) {\n EventLoopStop(Success);\n }\n\nprotected:\n static const char* const TEST_URL1;\n static const char* const TEST_URL2;\n static const char* const TEST_URL3;\n};\n\nconst char* const utc_blink_ewk_back_forward_list_item_unref::TEST_URL1 = \"ewk_history/page1.html\";\nconst char* const utc_blink_ewk_back_forward_list_item_unref::TEST_URL2 = \"ewk_history/page2.html\";\nconst char* const utc_blink_ewk_back_forward_list_item_unref::TEST_URL3 = \"ewk_history/page3.html\";\n\nTEST_F(utc_blink_ewk_back_forward_list_item_unref, POS_TEST)\n{\n // load 3 pages to get some interesting history\n ASSERT_EQ(EINA_TRUE, ewk_view_url_set(GetEwkWebView(), GetResourceUrl(TEST_URL1).c_str()));\n ASSERT_EQ(Success, EventLoopStart());\n ASSERT_EQ(EINA_TRUE, ewk_view_url_set(GetEwkWebView(), GetResourceUrl(TEST_URL2).c_str()));\n ASSERT_EQ(Success, EventLoopStart());\n\n // get back-forward list\n Ewk_Back_Forward_List *list = ewk_view_back_forward_list_get(GetEwkWebView());\n ASSERT_TRUE(list);\n\n // get current item and check URL, original URL and title\n Ewk_Back_Forward_List_Item *item = ewk_back_forward_list_current_item_get(list);\n ASSERT_STREQ(GetResourceUrl(TEST_URL2).c_str(), ewk_back_forward_list_item_url_get(item));\n\n // increase ref count\n item = ewk_back_forward_list_item_ref(item);\n ASSERT_TRUE(item);\n\n // go back\n ASSERT_EQ(EINA_TRUE, ewk_view_back(GetEwkWebView()));\n ASSERT_EQ(Success, EventLoopStart());\n\n // load page so the refed item should normally be deleted but as it is refed\n // it should be still ins the memory</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_>a,b = map(int,input().split())\nprint(max(a-2*b, 0))</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Ruby_> # Import credentials given a path to a valid manifest file\n #\n # @param creds_dump_manifest_path [String]\n # @param workspace [Mdm::Workspace] Default: {#workspace}\n # @return [void]\n def import_msf_cred_dump(creds_dump_manifest_path, workspace)\n manifest_file = File.open(creds_dump_manifest_path)\n origin = Metasploit::Credential::Origin::Import.create!(filename: File.basename(creds_dump_manifest_path))\n importer = Metasploit::Credential::Importer::Core.new(workspace: workspace, input: manifest_file, origin: origin)\n importer.import!\n end\n\n # Import credentials given a path to a valid manifest file\n #\n # @option args [String] :filename\n # @option args [Mdm::Workspace] :workspace Default: {#workspace}\n # @return [void]\n def import_msf_cred_dump_zip(args = {})\n wspace = Msf::Util::DBManager.process_opts_workspace(args, framework)\n origin = Metasploit::Credential::Origin::Import.create!(filename: File.basename(args[:filename]))\n importer = Metasploit::Credential::Importer::Zip.new(workspace: wspace, input: File.open(args[:filename]), origin: origin)\n importer.import!\n nil\n end\n\n # Perform in an import of an msfpwdump file\n def import_msf_pwdump(args={}, &block)\n filename = File.basename(args[:data].path)\n wspace = Msf::Util::DBManager.process_opts_workspace(args, framework)\n origin = Metasploit::Credential::Origin::Import.create!(filename: filename)\n importer = Metasploit::Credential::Importer::Pwdump.new(input: args[:data], workspace: wspace, filename: filename, origin:origin)\n importer.import!\n importer.input.close unless importer.input.closed?\n end\nend\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Ruby_># frozen_string_literal: true\n\n# This should be included on any Replicator which implements verification.\n#\n# Expected let variables:\n#\n# - primary\n# - secondary\n# - model_record\n# - replicator\n#\nRSpec.shared_examples 'a verifiable replicator' do\n include EE::GeoHelpers\n\n describe 'events' do\n it 'has checksum_succeeded event' do\n expect(described_class.supported_events).to include(:checksum_succeeded)\n end\n end\n\n describe '.verification_enabled?' do\n context 'when replication is enabled' do\n before do\n expect(described_class).to receive(:enabled?).and_return(true)\n end\n\n context 'when verification_feature_flag_enabled? returns true' do\n it 'returns true' do\n allow(described_class).to receive(:verification_feature_flag_enabled?).and_return(true)\n\n expect(described_class.verification_enabled?).to be_truthy\n end\n end\n\n context 'when verification_feature_flag_enabled? returns false' do\n it 'returns false' do\n allow(described_class).to receive(:verification_feature_flag_enabled?).and_return(false)\n\n expect(described_class.verification_enabled?).to be_falsey\n end</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_JavaScript_>function main(input) {\n var temp = input.split('\\n');\n\n for (var i =0; i < temp.length; i++) {\n if(parseInt(temp[i]) == 0){\n break;\n }\n console.log(\"Case\"+' '+(i+1)+\": \"+temp[i]);\n }\n}\n\nvar x = require('fs').readFileSync('/dev/stdin', 'utf-8');\nmain(x);\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Java_>package com.odysee.app.utils;\n\nimport android.net.Uri;\n\nimport org.json.JSONArray;\nimport org.json.JSONException;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.HashMap;\nimport java.util.List;\nimport java.util.Map;\n\nimport com.odysee.app.exceptions.LbryRequestException;\nimport com.odysee.app.exceptions.LbryResponseException;\nimport com.odysee.app.model.Claim;\nimport com.odysee.app.model.UrlSuggestion;\nimport okhttp3.OkHttpClient;\nimport okhttp3.Request;\nimport okhttp3.Response;\nimport okhttp3.ResponseBody;\n\npublic class Lighthouse {\n public static final String CONNECTION_STRING = \"https://lighthouse.odysee.com\";\n public static final Map<String, List<UrlSuggestion>> autocompleteCache = new HashMap<>();\n public static final Map<Map<String, Object>, List<Claim>> searchCache = new HashMap<>();\n\n private static Map<String, Object> buildSearchOptionsKey(String rawQuery, int size, int from, boolean nsfw, String relatedTo) {\n Map<String, Object> options = new HashMap<>();\n options.put(\"s\", rawQuery);\n options.put(\"size\", size);\n options.put(\"from\", from);\n options.put(\"nsfw\", nsfw);\n if (!Helper.isNullOrEmpty(relatedTo)) {\n options.put(\"related_to\", relatedTo);\n }\n return options;\n }\n\n public static List<Claim> search(String rawQuery, int size, int from, boolean nsfw, String relatedTo) throws LbryRequestException, LbryResponseException {\n Uri.Builder uriBuilder = Uri.parse(String.format(\"%s/search\", CONNECTION_STRING)).buildUpon().\n appendQueryParameter(\"s\", rawQuery).\n appendQueryParameter(\"resolve\", \"true\").\n appendQueryParameter(\"size\", String.valueOf(size)).\n appendQueryParameter(\"from\", String.valueOf(from));\n if (!nsfw) {\n uriBuilder.appendQueryParameter(\"nsfw\", String.valueOf(nsfw).toLowerCase());\n }\n if (!Helper.isNullOrEmpty(relatedTo)) {\n uriBuilder.appendQueryParameter(\"related_to\", relatedTo);\n }\n\n Map<String, Object> cacheKey = buildSearchOptionsKey(rawQuery, size, from, nsfw, relatedTo);</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C_> * on Wednesday, April 14, 2021 at 2:25:22 PM Mountain Standard Time</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_SQL_>--+ holdcas on;\n--- date\nset system parameters 'return_null_on_function_errors=yes';\ncreate table t1 (d1 date);\n\ninsert into t1 values (date'2001-10-11');\n\n\nselect -(d1) from t1;\n\ndrop table t1;\n\n\nselect -(date'2001-10-10');\n\nprepare st from 'select -(?)'\nexecute st using date'2001-10-11';\n\n\nset system parameters 'return_null_on_function_errors=no';commit;\n--+ holdcas off;\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Ruby_>module Pyper::Pipes\n # @param attr_map [Hash] A map of old field names to new field names, which will be used to rename attributes.\n class FieldRename < Struct.new(:attr_map)\n\n # @param args [Hash|Enumerator<Hash>] One or more item hashes\n # @param status [Hash] The mutable status field\n # @return [Hash|Enumerator<Hash>] The item(s) with fields renamed\n def pipe(attrs_or_items, status = {})\n case attrs_or_items\n when Hash then rename(attrs_or_items)\n else attrs_or_items.map { |item| rename(item) }\n end\n end\n\n def rename(item)\n attr_map.each do |old,new|\n item[new.to_sym] = item.delete(old.to_sym) if item.has_key?(old.to_sym)\n item[new.to_s] = item.delete(old.to_s) if item.has_key?(old.to_s)\n end\n item\n end\n end\nend\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>#!python3.4.3\n\nn = input()\na_list = list(map(int, input().split(' ')))\n \nresult = 'APPROVED'\n \nfor a in a_list:\n if a%2 == 0 and (a%3 != 0 and a%5 != 0):\n result = 'DENIED'\n break\n \nprint(result)</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Perl_>#!/usr/bin/env perl\n\n# ***** BEGIN LICENSE BLOCK *****\n# Version: MPL 1.1/GPL 2.0/LGPL 2.1\n#\n# The contents of this file are subject to the Mozilla Public License Version\n# 1.1 (the \"License\"); you may not use this file except in compliance with\n# the License. You may obtain a copy of the License at\n# http://www.mozilla.org/MPL/\n#\n# Software distributed under the License is distributed on an \"AS IS\" basis,\n# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License\n# for the specific language governing rights and limitations under the\n# License.\n#\n# The Original Code is this file as it was released upon December 26, 2000.\n#\n# The Initial Developer of the Original Code is\n# Netscape Communications Corporation.\n# Portions created by the Initial Developer are Copyright (C) 2000\n# the Initial Developer. All Rights Reserved.\n#\n# Contributor(s):\n# Christopher Seawood <cls@seawood.org>\n# Joey Armstrong <joey@mozilla.com>\n#\n# Alternatively, the contents of this file may be used under the terms of\n# either of the GNU General Public License Version 2 or later (the \"GPL\"),\n# or the GNU Lesser General Public License Version 2.1 or later (the \"LGPL\"),\n# in which case the provisions of the GPL or the LGPL are applicable instead\n# of those above. If you wish to allow use of your version of this file only\n# under the terms of either the GPL or the LGPL, and not to allow others to\n# use your version of this file under the terms of the MPL, indicate your\n# decision by deleting the provisions above and replace them with the notice\n# and other provisions required by the GPL or the LGPL. If you do not delete\n# the provisions above, a recipient may use your version of this file under\n# the terms of any one of the MPL, the GPL or the LGPL.\n#\n# ***** END LICENSE BLOCK *****\n\n##----------------------------##\n##---] CORE/CPAN INCLUDES [---##\n##----------------------------##\nuse strict;\nuse warnings;\nuse Getopt::Long;\n\n##-------------------##\n##---] EXPORTS [---##\n##-------------------##\nour $VERSION = qw(1.1);\n\n##-------------------##\n##---] GLOBALS [---##\n##-------------------##\nmy %argv;\nmy $modver = $Getopt::Long::VERSION || 0;\nmy $isOldGetopt = ($modver eq '2.25') ? 1 : 0;\n\n###########################################################################\n## Intent: Script init function\n###########################################################################\nsub init\n{\n if ($isOldGetopt)\n {\n\t# mozilla.build/mingw perl in need of an upgrade\n\t# emulate Getopt::Long switch|short:init\n\tforeach (qw(debug regex sort))\n\t{\n\t if (defined($argv{$_}))\n\t {\n\t\t$argv{$_} ||= 1;\n\t }\n\t}\n }\n} # init\n\n##----------------##\n##---] MAIN [---##\n##----------------##\nmy @args = ($isOldGetopt)\n ? qw(debug|d regex|r sort|s)\n : qw(debug|d:1 regex|r:1 sort|s:1)\n ;\n\nunless(GetOptions(\\%argv, @args))\n{\n print \"Usage: $0\\n\";\n print \" --sort Sort list elements early\\n\";\n print \" --regex Exclude subdirs by pattern\\n\";\n}\n\ninit();\nmy $debug = $argv{debug} || 0;\n\nmy %seen;\nmy @out;\nmy @in = ($argv{sort}) ? 
sort @ARGV : @ARGV;\n\nforeach my $d (@in)\n{\n next if ($seen{$d}++);\n\n print \" arg is $d\\n\" if ($debug);\n\n if ($argv{regex})\n {\n my $found = 0;\n foreach my $dir (@out)\n\t{\n\t my $dirM = quotemeta($dir);\n $found++, last if ($d eq $dir || $d =~ m!^${dirM}\\/!);\n }\n\tprint \"Adding $d\\n\" if ($debug && !$found);\n push @out, $d if (!$found);\n } else {\n\tprint \"Adding: $d\\n\" if ($debug);\n push(@out, $d);\n }\n}\n\nprint \"@out\\n\"\n\n# EOF\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C#_>using System;\nusing System.Collections.Generic;\n\nnamespace ConsoleApplication1\n{\n class Program\n {\n static void Main(string[] args)\n {\n\n var list = new List<int>();\n int s;\n\n do\n {\n s = int.Parse(Console.ReadLine());\n list.Add(s);\n } while (s != 0);\n\n for (int i = 0; i < list.Count-1; i++)\n {\n Console.WriteLine(\"Case {0}: {1}\", i+1, list[i]);\n }\n }\n }\n}\n\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_D_>import std.stdio, std.string, std.algorithm, std.array, std.range, std.conv,\n std.typecons, std.math, std.container, std.format, std.numeric;\n\nclass SegmentTree\n{\nprivate:\n long[] xs;\n long size;\npublic:\n this(long size)\n {\n long i = 1;\n while (i <= size)\n {\n i *= 2;\n }\n this.size = size;\n this.xs = new long[](i * 2);\n }\n\n void add(long l, long r, long x)\n {\n l += this.size;\n r += this.size;\n while (l < r)\n {\n if (l % 2 == 1)\n {\n xs[l] += x;\n l++;\n }\n l /= 2;\n if (r % 2 == 1)\n {\n xs[r - 1] += x;\n r--;\n }\n r /= 2;\n }\n }\n\n long get(long i)\n {\n i += this.size;\n long ans = xs[i];\n while (true)\n {\n i /= 2;\n if (i == 0)\n {\n break;\n }\n ans += xs[i];\n }\n return ans;\n }\n}\n\nvoid main(string[] args)\n{\n long n, q;\n readf(\"%d %d\\n\", &n, &q);\n\n auto seg = new SegmentTree(n);\n foreach (i; 0 .. q)\n {\n long cmd, s, t, x;\n readf(\"%d \", &cmd);\n\n if (cmd == 0)\n {\n readf(\"%d %d %d\\n\", &s, &t, &x);\n seg.add(s, t + 1, x);\n }\n else\n {\n readf(\"%d\\n\", &x);\n writeln(seg.get(x));\n }\n }\n\n}\n\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C++_>/* Tests the generic code for dealing with error objects.\n * These tests don't really have anything to do with USB. */\n\n#include <test_helper.h>\n\n#ifndef NDEBUG\n\nTEST_CASE(\"error class basic properties\")\n{\n SECTION(\"can be caught as a const std::exception &\")\n {\n try\n {\n throw libusbp::error();\n REQUIRE(0);\n }\n catch(const std::exception & e)\n {\n REQUIRE(1);\n CHECK(std::string(e.what()) == \"No error.\");\n }\n }\n\n SECTION(\"can be moved without getting copied\")\n {\n // This isn't really necessary for a light-weight error object, but it\n // is a feature that unique_pointer_wrapper_with_copy is supposed to\n // provide, so we want to test it on at least one of the classes that\n // uses unique_pointer_wrapper.\n\n libusbp::error error(error_create(\"hi\"));\n libusbp_error * p = error.pointer_get();\n REQUIRE(p);\n\n // Move constructor.\n libusbp::error error2 = std::move(error);\n REQUIRE(error2.pointer_get() == p);\n REQUIRE_FALSE(error);\n\n // Move assignment.\n error = std::move(error2);\n REQUIRE(error.pointer_get() == p);\n REQUIRE_FALSE(error2);\n }\n}\n\n\nTEST_CASE(\"null error\", \"[null_error]\")\n{\n libusbp::error error;\n\n SECTION(\"can also be constructed by passing in NULL\")\n {\n libusbp::error error(NULL);\n }\n\n SECTION(\"has a default message\")\n {\n REQUIRE(error.message() == \"No error.\");\n }\n\n SECTION(\"is not present\")\n {\n REQUIRE_FALSE(error);\n }\n\n SECTION(\"can be copied\")\n {\n libusbp::error error2 = error;\n REQUIRE_FALSE(error2);\n }\n\n SECTION(\"has no error codes, and you can not add codes to it\")\n {\n REQUIRE_FALSE(error.has_code(1));\n }\n}\n\nTEST_CASE(\"error_create\", \"[error_create]\")\n{\n SECTION(\"creates a non-null error\")\n {\n libusbp::error error(error_create(\"Error1 %d.\", 123));\n REQUIRE(error);\n }\n\n SECTION(\"properly formats its input\")\n {\n libusbp::error error(error_create(\"Error1 %d.\", 123));\n REQUIRE(error.message() == \"Error1 123.\");\n }\n}\n\nTEST_CASE(\"error_add\", \"[error_add]\")\n{\n SECTION(\"works with NULL\")\n {\n libusbp::error error(error_add(NULL, \"hi\"));\n REQUIRE(error.message() == \"hi\");\n }\n\n SECTION(\"preserves the message and error codes of errors passed to it\")\n {\n libusbp::error error(error_add(error_add_code(error_create(\"hi1\"), 7), \"hi2\"));\n CHECK(error.message() == \"hi2 hi1\");\n CHECK(error.has_code(7));\n }\n}\n\nTEST_CASE(\"error_add_code\", \"[error_add_code]\")\n{\n SECTION(\"works with NULL\")\n {\n libusbp::error error(error_add_code(NULL, 4));\n CHECK(error.message() == \"\");\n CHECK(error.has_code(4));\n }\n\n SECTION(\"preserves the message and error codes of errors passed to it\")\n {\n libusbp::error error(error_add_code(error_add_code(error_create(\"hi1\"), 7), 9));\n CHECK(error.message() == \"hi1\");\n CHECK(error.has_code(7));\n CHECK(error.has_code(9));\n }\n}\n\nTEST_CASE(\"error_no_memory\")\n{\n libusbp::error error(&error_no_memory);\n\n SECTION(\"has the right message\")\n {\n REQUIRE(error.message() == \"Failed to allocate memory.\");\n }\n\n SECTION(\"has the right code\")\n {</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Go_>//(C) Copyright [2020] Hewlett Packard Enterprise Development LP\n\n//\n\n//Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n\n//not use this file except in compliance with the License. You may obtain\n\n//a copy of the License at\n\n//\n\n// http://www.apache.org/licenses/LICENSE-2.0\n\n//\n\n//Unless required by applicable law or agreed to in writing, software\n\n//distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n\n//WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n\n//License for the specific language governing permissions and limitations\n\n// under the License.\n\npackage system\n\nimport (\n\t\"bytes\"\n\t\"encoding/json\"\n\t\"fmt\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"reflect\"\n\t\"strings\"\n\t\"testing\"\n\n\t\"github.com/ODIM-Project/ODIM/lib-utilities/common\"\n\t\"github.com/ODIM-Project/ODIM/lib-utilities/config\"\n\taggregatorproto \"github.com/ODIM-Project/ODIM/lib-utilities/proto/aggregator\"</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Rust_>test_modules!(\n token_capturing,\n selector_matching,\n element_content_replacement\n);\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_PHP_><?php\ndefined('BASEPATH') OR exit('No direct script access allowed');\n?><!DOCTYPE html>\n<html lang=\"en\">\n<head>\n\t<meta charset=\"utf-8\">\n\t<title>Welcome to CodeIgniter</title>\n\n\t<style type=\"text/css\">\n\n\t::selection { background-color: #E13300; color: white; }\n\t::-moz-selection { background-color: #E13300; color: white; }\n\n\tbody {\n\t\tbackground-color: #fff;\n\t\tmargin: 40px;\n\t\tfont: 13px/20px normal Helvetica, Arial, sans-serif;\n\t\tcolor: #4F5155;\n\t}\n\n\ta {\n\t\tcolor: #003399;\n\t\tbackground-color: transparent;\n\t\tfont-weight: normal;\n\t}\n\n\th1 {\n\t\tcolor: #444;\n\t\tbackground-color: transparent;\n\t\tborder-bottom: 1px solid #D0D0D0;\n\t\tfont-size: 19px;\n\t\tfont-weight: normal;\n\t\tmargin: 0 0 14px 0;\n\t\tpadding: 14px 15px 10px 15px;\n\t}\n\n\tcode {\n\t\tfont-family: Consolas, Monaco, Courier New, Courier, monospace;\n\t\tfont-size: 12px;\n\t\tbackground-color: #f9f9f9;\n\t\tborder: 1px solid #D0D0D0;\n\t\tcolor: #002166;\n\t\tdisplay: block;\n\t\tmargin: 14px 0 14px 0;\n\t\tpadding: 12px 10px 12px 10px;\n\t}\n\n\t#body {\n\t\tmargin: 0 15px 0 15px;\n\t}\n\n\tp.footer {\n\t\ttext-align: right;\n\t\tfont-size: 11px;\n\t\tborder-top: 1px solid #D0D0D0;\n\t\tline-height: 32px;</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_JavaScript_>\nmodule.exports = writeJson;</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_CSS_>/*!\n * froala_editor v2.8.0 (https://www.froala.com/wysiwyg-editor)\n * License https://froala.com/wysiwyg-editor/terms/\n * Copyright 2014-2018 Froala Labs\n */\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_HTML_><!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Transitional//EN\"\n \"http://www.w3.org/TR/html4/loose.dtd\"><!-- DO NOT EDIT THIS FILE-->\n<!-- Edit the .tex version instead-->\n\n<html>\n<head>\n<title>Binding Forms</title>\n<link href=\"csug.css\" rel=\"stylesheet\" type=\"text/css\">\n</head>\n<body>\n<a name=\"g36\"></a>\n<a name=\"./binding:h0\"></a>\n\n<h1>Chapter 5. Binding Forms<a name=\"CHPTBINDING\"></a></h1>\n\n\n\n\n\n<p>\nThis chapter describes <i>Chez&nbsp;Scheme</i> extensions to the set of Revised<sup>6</sup>\nReport binding forms.</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_> N = input()\n if N[0] == N[1] == N[2] or N[1] == N[2] == N[3]:\n print(\"Yes\")\n else:\n print(\"No\")</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C_>}\n\nstatic int unix_socketpair(struct socket *socka, struct socket *sockb)\n{\n\tstruct sock *ska = socka->sk, *skb = sockb->sk;\n\n\t/* Join our sockets back to back */\n\tsock_hold(ska);\n\tsock_hold(skb);\n\tunix_peer(ska) = skb;\n\tunix_peer(skb) = ska;\n\tinit_peercred(ska);\n\tinit_peercred(skb);\n\n\tif (ska->sk_type != SOCK_DGRAM) {\n\t\tska->sk_state = TCP_ESTABLISHED;\n\t\tskb->sk_state = TCP_ESTABLISHED;\n\t\tsocka->state = SS_CONNECTED;\n\t\tsockb->state = SS_CONNECTED;\n\t}\n\treturn 0;\n}\n\nstatic void unix_sock_inherit_flags(const struct socket *old,\n\t\t\t\t struct socket *new)\n{\n\tif (test_bit(SOCK_PASSCRED, &old->flags))\n\t\tset_bit(SOCK_PASSCRED, &new->flags);\n\tif (test_bit(SOCK_PASSSEC, &old->flags))\n\t\tset_bit(SOCK_PASSSEC, &new->flags);\n}\n\nstatic int unix_accept(struct socket *sock, struct socket *newsock, int flags,\n\t\t bool kern)\n{\n\tstruct sock *sk = sock->sk;\n\tstruct sock *tsk;\n\tstruct sk_buff *skb;\n\tint err;\n\n\terr = -EOPNOTSUPP;\n\tif (sock->type != SOCK_STREAM && sock->type != SOCK_SEQPACKET)\n\t\tgoto out;\n\n\terr = -EINVAL;\n\tif (sk->sk_state != TCP_LISTEN)\n\t\tgoto out;\n\n\t/* If socket state is TCP_LISTEN it cannot change (for now...),\n\t * so that no locks are necessary.\n\t */\n\n\tskb = skb_recv_datagram(sk, 0, flags&O_NONBLOCK, &err);\n\tif (!skb) {\n\t\t/* This means receive shutdown. */\n\t\tif (err == 0)\n\t\t\terr = -EINVAL;\n\t\tgoto out;\n\t}\n\n\ttsk = skb->sk;\n\tskb_free_datagram(sk, skb);\n\twake_up_interruptible(&unix_sk(sk)->peer_wait);\n\n\t/* attach accepted sock to socket */\n\tunix_state_lock(tsk);\n\tnewsock->state = SS_CONNECTED;\n\tunix_sock_inherit_flags(sock, newsock);\n\tsock_graft(tsk, newsock);\n\tunix_state_unlock(tsk);\n\treturn 0;\n\nout:\n\treturn err;\n}\n\n\nstatic int unix_getname(struct socket *sock, struct sockaddr *uaddr, int peer)\n{\n\tstruct sock *sk = sock->sk;\n\tstruct unix_address *addr;\n\tDECLARE_SOCKADDR(struct sockaddr_un *, sunaddr, uaddr);\n\tint err = 0;\n\n\tif (peer) {\n\t\tsk = unix_peer_get(sk);\n\n\t\terr = -ENOTCONN;\n\t\tif (!sk)\n\t\t\tgoto out;\n\t\terr = 0;\n\t} else {\n\t\tsock_hold(sk);\n\t}\n\n\taddr = smp_load_acquire(&unix_sk(sk)->addr);\n\tif (!addr) {\n\t\tsunaddr->sun_family = AF_UNIX;\n\t\tsunaddr->sun_path[0] = 0;\n\t\terr = sizeof(short);\n\t} else {\n\t\terr = addr->len;\n\t\tmemcpy(sunaddr, addr->name, addr->len);\n\t}\n\tsock_put(sk);\nout:\n\treturn err;\n}\n\nstatic int unix_scm_to_skb(struct scm_cookie *scm, struct sk_buff *skb, bool send_fds)\n{\n\tint err = 0;\n\n\tUNIXCB(skb).pid = get_pid(scm->pid);\n\tUNIXCB(skb).uid = scm->creds.uid;\n\tUNIXCB(skb).gid = scm->creds.gid;\n\tUNIXCB(skb).fp = NULL;\n\tunix_get_secdata(scm, skb);\n\tif (scm->fp && send_fds)\n\t\terr = unix_attach_fds(scm, skb);\n\n\tskb->destructor = unix_destruct_scm;\n\treturn err;\n}\n\nstatic bool unix_passcred_enabled(const struct socket *sock,\n\t\t\t\t const struct sock *other)\n{\n\treturn test_bit(SOCK_PASSCRED, &sock->flags) ||\n\t !other->sk_socket ||\n\t test_bit(SOCK_PASSCRED, &other->sk_socket->flags);\n}\n\n/*\n * Some apps rely on write() giving SCM_CREDENTIALS\n * We include credentials if source or destination socket\n * asserted SOCK_PASSCRED.\n */\nstatic void maybe_add_creds(struct sk_buff *skb, const struct socket *sock,\n\t\t\t const struct sock *other)\n{\n\tif (UNIXCB(skb).pid)\n\t\treturn;\n\tif (unix_passcred_enabled(sock, other)) {\n\t\tUNIXCB(skb).pid = 
get_pid(task_tgid(current));\n\t\tcurrent_uid_gid(&UNIXCB(skb).uid, &UNIXCB(skb).gid);\n\t}\n}\n\nstatic int maybe_init_creds(struct scm_cookie *scm,\n\t\t\t struct socket *socket,\n\t\t\t const struct sock *other)\n{\n\tint err;\n\tstruct msghdr msg = { .msg_controllen = 0 };\n\n\terr = scm_send(socket, &msg, scm, false);\n\tif (err)\n\t\treturn err;\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_HTML_>\n\n<!DOCTYPE html>\n<html class=\"writer-html5\" lang=\"en\" >\n<head>\n <meta charset=\"utf-8\">\n \n <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\n \n <title>dscribe.descriptors.coulombmatrix &mdash; DScribe 0.3.6a0 documentation</title>\n \n\n </code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_>let [a, op, b] = require(\"fs\").readFileSync(\"/dev/stdin\", \"utf8\").split(\" \");\n[a, b] = [a, b].map(Number);\nconsole.log(op == \"+\" ? a + b : a - b);</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Go_>package discovery\n\n/**\n * lxd.go - LXD API discovery implementation\n *\n * @author Joe Topjian <joe@topjian.net>\n */\n\nimport (\n\t\"encoding/pem\"\n\t\"fmt\"\n\t\"os\"\n\t\"strings\"\n\t\"time\"\n\n\tlxd \"github.com/lxc/lxd/client\"\n\tlxd_config \"github.com/lxc/lxd/lxc/config\"\n\t\"github.com/lxc/lxd/shared\"\n\tlxd_api \"github.com/lxc/lxd/shared/api\"\n\t\"github.com/yyyar/gobetween/config\"\n\t\"github.com/yyyar/gobetween/core\"\n\t\"github.com/yyyar/gobetween/logging\"\n\t\"github.com/yyyar/gobetween/utils\"\n)\n\nconst (\n\tlxdRetryWaitDuration = 2 * time.Second\n\tlxdTimeout = 5 * time.Second\n)\n\n/**\n * Create new Discovery with LXD fetch func\n */\nfunc NewLXDDiscovery(cfg config.DiscoveryConfig) interface{} {\n\n\td := Discovery{\n\t\topts: DiscoveryOpts{lxdRetryWaitDuration},\n\t\tfetch: lxdFetch,\n\t\tcfg: cfg,\n\t}\n\n\treturn &d\n}\n\n/**\n * Fetch backends from LXD API\n */\nfunc lxdFetch(cfg config.DiscoveryConfig) (*[]core.Backend, error) {\n\tlog := logging.For(\"lxdFetch\")\n\n\t/* Get an LXD client */\n\tclient, err := lxdBuildClient(cfg)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t/* Get an LXD config */\n\tlxdConfig, err := lxdBuildConfig(cfg)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t/* Set the timeout for the client */\n\thttpClient, err := client.GetHTTPClient()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\thttpClient.Timeout = utils.ParseDurationOrDefault(cfg.Timeout, lxdTimeout)\n\n\tlog.Debug(\"Fetching containers from \", lxdConfig.Remotes[cfg.LXDServerRemoteName].Addr)\n\n\t/* Create backends from response */\n\tbackends := []core.Backend{}\n\n\t/* Fetch containers */\n\tcontainers, err := client.GetContainers()\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\tfor _, container := range containers {\n\n\t\t/* Ignore containers that aren't running */\n\t\tif container.Status != \"Running\" {\n\t\t\tcontinue\n\t\t}\n\n\t\t/* Ignore continers if not match label key and value */\n\t\tif cfg.LXDContainerLabelKey != \"\" {\n\n\t\t\tactualLabelValue, ok := container.Config[cfg.LXDContainerLabelKey]\n\t\t\tif !ok {\n\t\t\t\tcontinue\n\t\t\t}\n\n\t\t\tif cfg.LXDContainerLabelValue != \"\" && actualLabelValue != cfg.LXDContainerLabelValue {\n\t\t\t\tcontinue\n\t\t\t}\n\t\t}\n\n\t\t/* Try get container port either from label, or from discovery config */\n\t\tport := fmt.Sprintf(\"%v\", cfg.LXDContainerPort)\n\n\t\tif cfg.LXDContainerPortKey != \"\" {\n\t\t\tif p, ok := container.Config[cfg.LXDContainerPortKey]; ok {\n\t\t\t\tport = p\n\t\t\t}\n\t\t}\n\n\t\tif port == \"\" {\n\t\t\tlog.Warn(fmt.Sprintf(\"Port is not found in neither in lxd_container_port config not in %s label for %s. Skipping\",\n\t\t\t\tcfg.LXDContainerPortKey, container.Name))\n\t\t\tcontinue\n\t\t}\n\n\t\t/* iface is the container interface to get an IP address. */\n\t\t/* This isn't exposed by the LXD API, and containers can have multiple interfaces, */\n\t\tiface := cfg.LXDContainerInterface\n\t\tif v, ok := container.Config[cfg.LXDContainerInterfaceKey]; ok {\n\t\t\tiface = v\n\t\t}\n\n\t\tip := \"\"\n\t\tif ip, err = lxdDetermineContainerIP(client, container.Name, iface, cfg.LXDContainerAddressType); err != nil {\n\t\t\tlog.Error(fmt.Sprintf(\"Can't determine %s container ip address: %s. 
Skipping\", container.Name, err))\n\t\t\tcontinue\n\t\t}\n\n\t\tsni := \"\"\n\t\tif v, ok := container.Config[cfg.LXDContainerSNIKey]; ok {\n\t\t\tsni = v\n\t\t}\n\n\t\tbackends = append(backends, core.Backend{\n\t\t\tTarget: core.Target{\n\t\t\t\tHost: ip,\n\t\t\t\tPort: port,\n\t\t\t},\n\t\t\tPriority: 1,\n\t\t\tWeight: 1,\n\t\t\tStats: core.BackendStats{\n\t\t\t\tLive: true,\n\t\t\t},\n\t\t\tSni: sni,\n\t\t})\n\t}\n\n\treturn &backends, nil\n}\n\n/**\n * Create new LXD Client\n */\nfunc lxdBuildClient(cfg config.DiscoveryConfig) (lxd.ContainerServer, error) {\n\tlog := logging.For(\"lxdBuildClient\")\n\n\t/* Make a client to pass around */\n\tvar client lxd.ContainerServer\n\n\t/* Build a configuration with the requested options */\n\tlxdConfig, err := lxdBuildConfig(cfg)\n\tif err != nil {\n\t\treturn client, err\n\t}\n\n\tif strings.HasPrefix(cfg.LXDServerAddress, \"https:\") {\n\n\t\t/* Validate or generate certificates on the client side (gobetween) */\n\t\tif cfg.LXDGenerateClientCerts {\n\t\t\tlog.Debug(\"Generating LXD client certificates\")\n\t\t\tif err := lxdConfig.GenerateClientCertificate(); err != nil {\n\t\t\t\treturn nil, err\n\t\t\t}\n\t\t}\n\n\t\t/* Validate or accept certificates on the server side (LXD) */\n\t\tserverCertf := lxdConfig.ServerCertPath(cfg.LXDServerRemoteName)\n\t\tif !shared.PathExists(serverCertf) {\n\t\t\t/* If the server certificate was not found, either gobetween and the LXD server are set\n\t\t\t * up for PKI, or gobetween must authenticate with the LXD server and accept its server\n\t\t\t * certificate.\n\t\t\t *\n\t\t\t * First, see if communication with the LXD server is possible.\n\t\t\t */\n\t\t\t_, err := lxdConfig.GetContainerServer(cfg.LXDServerRemoteName)\n\t\t\tif err != nil {\n\t\t\t\t/* If there was an error, then gobetween will try to download the server's cert. */\n\t\t\t\tif cfg.LXDAcceptServerCert {\n\t\t\t\t\tlog.Debug(\"Retrieving LXD server certificate\")\n\t\t\t\t\terr := lxdGetRemoteCertificate(lxdConfig, cfg.LXDServerRemoteName)\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\treturn nil, fmt.Errorf(\"Could obtain LXD server certificate: %s\", err)\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\terr := fmt.Errorf(\"Unable to communicate with LXD server. Either set \" +\n\t\t\t\t\t\t\"lxd_accept_server_cert to true or add the LXD server out of \" +\n\t\t\t\t\t\t\"band of gobetween and try again.\")\n\t\t\t\t\treturn nil, err\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t/*\n\t\t * Finally, check and see if gobetween needs to authenticate with the LXD server.\n\t\t * Authentication happens only once. 
After that, gobetween will be a trusted client\n\t\t * as long as the exchanged certificates to not change.\n\t\t *\n\t\t * Authentication must happen even if PKI is in use.\n\t\t */\n\t\tclient, err = lxdConfig.GetContainerServer(cfg.LXDServerRemoteName)\n\t\tif err != nil {\n\t\t\treturn nil, err\n\t\t}\n\n\t\tlog.Info(\"Authenticating to LXD server\")\n\t\terr = lxdAuthenticateToServer(client, cfg.LXDServerRemoteName, cfg.LXDServerRemotePassword)\n\t\tif err != nil {\n\t\t\tlog.Info(\"Authentication unsuccessful\")\n\t\t\treturn nil, err\n\t\t}\n\n\t\tlog.Info(\"Authentication successful\")\n\t}\n\n\t/* Build a new client */\n\tclient, err = lxdConfig.GetContainerServer(cfg.LXDServerRemoteName)\n\tif err != nil {\n\t\treturn nil, err\n\t}\n\n\t/* Validate the client config and connectivity */\n\tif _, _, err := client.GetServer(); err != nil {\n\t\treturn nil, err\n\t}\n\n\treturn client, nil\n}\n\n/**\n * Create LXD Client Config\n */</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>#!/usr/bin/env python3\n#\n# Copyright 2019 The Hafnium Authors.\n#\n# Use of this source code is governed by a BSD-style\n# license that can be found in the LICENSE file or at\n# https://opensource.org/licenses/BSD-3-Clause.\n\n\"\"\"Check ELF file for assembly-level regressions.\n\nObjdumps the given ELF file and detects known assembly patterns, checking for\nregressions on bugs such as CPU erratas. Throws an exception if a broken pattern\nis detected.\n\"\"\"\n\nimport argparse\nimport os\nimport re\nimport subprocess\nimport sys\n\nHF_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))\nCLANG_ROOT = os.path.join(HF_ROOT, \"prebuilts\", \"linux-x64\", \"clang\")\nOBJDUMP = os.path.join(CLANG_ROOT, \"bin\", \"llvm-objdump\")\nNM = os.path.join(CLANG_ROOT, \"bin\", \"llvm-nm\")\n\ndef check_eret_speculation_barrier(args):\n\t\"\"\"\n\tSome ARM64 CPUs speculatively execute instructions after ERET.\n\tCheck that every ERET is followed by DSB NSH and ISB.\n\t\"\"\"\n\n\tobjdump_stdout = subprocess\\\n\t\t.check_output([ OBJDUMP, \"-d\", args.input_elf ])\\\n\t\t.decode(\"utf-8\")\\\n\t\t.splitlines()\n\n\tfound_eret = False\n\n\tSTATE_DEFAULT = 1\n\tSTATE_EXPECT_DSB_NSH = 2\n\tSTATE_EXPECT_ISB = 3\n\n\tREGEX_ERET = re.compile(r\"^\\s*[0-9a-f]+:\\s*e0 03 9f d6\\s+eret$\")\n\tREGEX_DSB_NSH = re.compile(r\"^\\s*[0-9a-f]+:\\s*9f 37 03 d5\\s*dsb\\s+nsh$\")\n\tREGEX_ISB = re.compile(r\"^\\s*[0-9a-f]+:\\s*df 3f 03 d5\\s+isb$\")\n\n\tstate = STATE_DEFAULT\n\tfor line in objdump_stdout:\n\t\tif state == STATE_DEFAULT:\n\t\t\tif re.match(REGEX_ERET, line):\n\t\t\t\tfound_eret = True\n\t\t\t\tstate = STATE_EXPECT_DSB_NSH\n\t\telif state == STATE_EXPECT_DSB_NSH:\n\t\t\tif re.match(REGEX_DSB_NSH, line):\n\t\t\t\tstate = STATE_EXPECT_ISB\n\t\t\telse:\n\t\t\t\traise Exception(\"ERET not followed by DSB NSH\")\n\t\telif state == STATE_EXPECT_ISB:\n\t\t\tif re.match(REGEX_ISB, line):\n\t\t\t\tstate = STATE_DEFAULT\n\t\t\telse:\n\t\t\t\traise Exception(\"ERET not followed by ISB\")\n\n\t# Ensure that at least one instance was found, otherwise the regexes are\n\t# probably wrong.\n\tif not found_eret:\n\t\traise Exception(\"Could not find any ERET instructions\")\n\ndef check_max_image_size(args):\n\t\"\"\"\n\tCheck that the ELF's effective image size does not exceed maximum\n\tallowed image size, if specified in command-line arguments.\n\t\"\"\"\n\n\tif args.max_image_size <= 0:\n\t\treturn\n\n\tnm_stdout = subprocess\\\n\t\t.check_output([ NM, args.input_elf ])\\\n\t\t.decode(\"utf-8\")\\\n\t\t.splitlines()\n\n\tCOLUMN_COUNT = 3\n\tCOLUMN_IDX_VALUE = 0\n\tCOLUMN_IDX_TYPE = 1\n\tCOLUMN_IDX_NAME = 2\n\n\timage_size = None\n\tfor line in nm_stdout:\n\t\tline = line.split()\n\t\tif len(line) != COLUMN_COUNT:\n\t\t\traise Exception(\n\t\t\t\t\"Unexpected number of columns in NM output\")\n\n\t\tif line[COLUMN_IDX_NAME] == \"image_size\":\n\t\t\tif line[COLUMN_IDX_TYPE] != \"A\":\n\t\t\t\traise Exception(\n\t\t\t\t\t\"Unexpected type of image_size symbol\")\n\t\t\timage_size = int(line[COLUMN_IDX_VALUE], 16)\n\t\t\tbreak\n\n\tif image_size is None:\n\t\traise Exception(\"Could not find value of image_size symbol\")\n\telif image_size > args.max_image_size:\n\t\traise Exception(\n\t\t\t\"Image size exceeds maximum allowed image size \" +\n\t\t\t\"({}B > {}B)\".format(image_size, args.max_image_size))\n\ndef Main():\n\tparser = argparse.ArgumentParser()\n\tparser.add_argument(\"input_elf\",\n\t\thelp=\"ELF file to 
analyze\")\n\tparser.add_argument(\"stamp_file\",\n\t\thelp=\"file to be touched if successful\")\n\tparser.add_argument(\"--max-image-size\",\n\t\trequired=False, type=int, default=0,\n\t\thelp=\"maximum allowed image size in bytes\")\n\targs = parser.parse_args()\n\n\tcheck_eret_speculation_barrier(args)\n\tcheck_max_image_size(args)\n\n\t# Touch `stamp_file`.\n\twith open(args.stamp_file, \"w\"):\n\t\tpass\n\n\treturn 0\n\nif __name__ == \"__main__\":\n\tsys.exit(Main())\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Pascal_> ID_ABOUT:\n begin\n SetForegroundWindow(Wnd);\n DialogBox(HInstance, PChar(ID_ABOUT), Wnd, @SimpleDlgProc);\n end;\n ID_CONFIGURE:\n begin\n SetForeGroundWindow(Wnd);\n DialogBox(HInstance, PChar(ID_CONFIGURE), Wnd, @SimpleDlgProc);\n end;\n SC_CLOSE: SendMessage(Wnd, WM_Close, 0, 0);\n end;\n WM_SYSCOMMAND:\n case wp of\n SC_RESTORE, SC_MINIMIZE, ID_ABOUT, ID_CONFIGURE, SC_CLOSE:\n begin\n Result := 1;\n SendMessage(Wnd, WM_COMMAND, wp, 0);\n end\n else\n Result := DefWindowProc(Wnd, Msg, wp, lp);</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_CSS_>body {\n font-family: 'Arial', 'Helvetica Neue', Helvetica, sans-serif;\n color: #777;\n background: #777;\n width: 960px;\n font-size: 18px;\n text-align: center;\n margin: auto;\n}\n\nheader {\n background: #ebebeb;\n color: #777;\n margin: 10px;\n padding: 20px;\n}\n\nnav {\n background: #ebebeb;\n color: #777;\n margin: 10px;\n padding: 20px;\n}\n\nsection {\n background: #ebebeb;\n width: 495px;\n float: left;\n height: 275px;\n color: #777;\n margin: 10px;\n padding: 20px;\n position: relative;\n}\n\naside {\n background: #ebebeb;\n float: left;\n margin: 10px;\n padding: 20px;\n width: 345px;\n height: 275px;\n}\n\nfooter {\n background: #ebebeb;\n margin: 10px;\n padding: 20px;\n width: 900px;\n clear: both;\n display: inline-block;\n}\n\narticle {\n color: #ebebeb;\n background: #777;\n padding: 20px;\n position: absolute;\n bottom: 10px;\n left: 10px;\n right: 10px;\n}\n\nh1 {\n color: #777;\n background: #ebebeb;\n margin: 10px;\n padding: 20px;\n}\n\np {\n color: #777;\n background: #ebebeb;\n margin: 10px;\n padding: 20px;\n}</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_> if self.mode == 'mean':\n hist = torch.div(hist, user_behavior_length.type(torch.float32) + self.eps)\n\n hist = torch.unsqueeze(hist, dim=1)\n return hist\n\n\n# class AttentionSequencePoolingLayer(nn.Module):\n# \"\"\"The Attentional sequence pooling operation used in DIN.\n#\n# Input shape\n# - A list of three tensor: [query,keys,keys_length]\n#\n# - query is a 3D tensor with shape: ``(batch_size, 1, embedding_size)``\n#\n# - keys is a 3D tensor with shape: ``(batch_size, T, embedding_size)``\n#\n# - keys_length is a 2D tensor with shape: ``(batch_size, 1)``\n#\n# Output shape\n# - 3D tensor with shape: ``(batch_size, 1, embedding_size)``\n#\n# Arguments\n# - **att_hidden_units**: List of positive integer, the attention net layer number and units in each layer.\n#\n# - **embedding_dim**: Dimension of the input embeddings.\n#\n# - **activation**: Activation function to use in attention net.\n#\n# - **weight_normalization**: bool.Whether normalize the attention score of local activation unit.\n#\n# References\n# - [Zhou G, Zhu X, Song C, et al. Deep interest network for click-through rate prediction[C]//Proceedings of the 24th ACM SIGKDD International Conference on Knowledge Discovery & Data Mining. ACM, 2018: 1059-1068.](https://arxiv.org/pdf/1706.06978.pdf)\n# \"\"\"\n#\n# def __init__(self, att_hidden_units=[80, 40], embedding_dim=4, activation='Dice', weight_normalization=False):\n# super(AttentionSequencePoolingLayer, self).__init__()\n#\n# self.local_att = LocalActivationUnit(hidden_units=att_hidden_units, embedding_dim=embedding_dim,\n# activation=activation)\n#\n# def forward(self, query, keys, keys_length):\n# # query: [B, 1, E], keys: [B, T, E], keys_length: [B, 1]\n# # TODO: Mini-batch aware regularization in originial paper [Zhou G, et al. 2018] is not implemented here. As the authors mentioned\n# # it is not a must for small dataset as the open-sourced ones.\n# attention_score = self.local_att(query, keys)\n# attention_score = torch.transpose(attention_score, 1, 2) # B * 1 * T\n#\n# # define mask by length\n# keys_length = keys_length.type(torch.LongTensor)\n# mask = torch.arange(keys.size(1))[None, :] < keys_length[:, None] # [1, T] < [B, 1, 1] -> [B, 1, T]\n#\n# # mask\n# output = torch.mul(attention_score, mask.type(torch.FloatTensor)) # [B, 1, T]\n#\n# # multiply weight\n# output = torch.matmul(output, keys) # [B, 1, E]\n#\n# return output\n\n\nclass KMaxPooling(nn.Module):\n \"\"\"K Max pooling that selects the k biggest value along the specific axis.\n\n Input shape\n - nD tensor with shape: ``(batch_size, ..., input_dim)``.\n\n Output shape\n - nD tensor with shape: ``(batch_size, ..., output_dim)``.\n\n Arguments\n - **k**: positive integer, number of top elements to look for along the ``axis`` dimension.\n\n - **axis**: positive integer, the dimension to look for elements.\n\n \"\"\"\n\n def __init__(self, k, axis, device='cpu'):\n super(KMaxPooling, self).__init__()\n self.k = k\n self.axis = axis\n self.to(device)\n\n def forward(self, input):\n if self.axis < 0 or self.axis >= len(input.shape):\n raise ValueError(\"axis must be 0~%d,now is %d\" %\n (len(input.shape) - 1, self.axis))\n\n if self.k < 1 or self.k > input.shape[self.axis]:\n raise ValueError(\"k must be in 1 ~ %d,now k is %d\" %\n (input.shape[self.axis], self.k))\n\n out = torch.topk(input, k=self.k, dim=self.axis, sorted=True)[0]\n return out\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Haskell_>{- |\nCopyright : (c) Runtime Verification, 2020-2021\nLicense : BSD-3-Clause\n-}\nmodule Kore.Equation.Application (\n attemptEquation,\n AttemptEquationResult,\n applyEquation,\n applySubstitutionAndSimplify,\n) where\n\nimport Control.Error (\n ExceptT,\n MaybeT (..),\n maybeToList,\n noteT,\n runExceptT,\n throwE,\n )\nimport Control.Monad (\n (>=>),\n )\nimport Control.Monad.Except (\n catchError,\n )\nimport Data.Map.Strict (\n Map,\n )\nimport qualified Data.Map.Strict as Map\nimport Data.Set (\n Set,\n )\nimport qualified Data.Set as Set\nimport qualified Kore.Attribute.Axiom as Attribute\nimport Kore.Attribute.Pattern.FreeVariables (\n HasFreeVariables (..),\n )\nimport qualified Kore.Attribute.Pattern.FreeVariables as FreeVariables\nimport Kore.Equation.DebugEquation\nimport Kore.Equation.Equation (\n Equation (..),\n )\nimport qualified Kore.Equation.Equation as Equation\nimport Kore.Internal.Condition (\n Condition,\n )\nimport qualified Kore.Internal.Condition as Condition\nimport qualified Kore.Internal.OrCondition as OrCondition\nimport Kore.Internal.OrPattern (\n OrPattern,\n )\nimport qualified Kore.Internal.OrPattern as OrPattern\nimport Kore.Internal.Pattern (\n Conditional (..),\n Pattern,\n )\nimport qualified Kore.Internal.Pattern as Pattern\nimport Kore.Internal.Predicate (\n Predicate,\n makeAndPredicate,\n makeNotPredicate,\n )\nimport Kore.Internal.SideCondition (\n SideCondition,\n )\nimport qualified Kore.Internal.SideCondition as SideCondition\nimport Kore.Internal.Substitution (\n Substitution,\n )\nimport qualified Kore.Internal.Substitution as Substitution\nimport Kore.Internal.TermLike (\n TermLike,\n )\nimport qualified Kore.Internal.TermLike as TermLike\nimport Kore.Rewrite.Axiom.Matcher (\n MatchResult,\n matchIncremental,\n )\nimport Kore.Rewrite.RewritingVariable (\n RewritingVariableName,\n )\nimport qualified Kore.Rewrite.SMT.Evaluator as SMT\nimport qualified Kore.Rewrite.Substitution as Substitution\nimport Kore.Simplify.Simplify (\n MonadSimplify,\n )\nimport qualified Kore.Simplify.Simplify as Simplifier\nimport Kore.Substitute</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_HTML_> <div id=\"MSearchBox\" class=\"MSearchBoxInactive\">\n <span class=\"left\">\n <img id=\"MSearchSelect\" src=\"search/mag_sel.png\"\n onmouseover=\"return searchBox.OnSearchSelectShow()\"\n onmouseout=\"return searchBox.OnSearchSelectHide()\"\n alt=\"\"/>\n <input type=\"text\" id=\"MSearchField\" value=\"Search\" accesskey=\"S\"\n onfocus=\"searchBox.OnSearchFieldFocus(true)\" \n onblur=\"searchBox.OnSearchFieldFocus(false)\" \n onkeyup=\"searchBox.OnSearchFieldChange(event)\"/>\n </span><span class=\"right\">\n <a id=\"MSearchClose\" href=\"javascript:searchBox.CloseResultsWindow()\"><img id=\"MSearchCloseImg\" border=\"0\" src=\"search/close.png\" alt=\"\"/></a>\n </span>\n </div>\n </li>\n </ul>\n </div>\n<!-- window showing the filter options -->\n<div id=\"MSearchSelectWindow\"\n onmouseover=\"return searchBox.OnSearchSelectShow()\"\n onmouseout=\"return searchBox.OnSearchSelectHide()\"\n onkeydown=\"return searchBox.OnSearchSelectKey(event)\">\n<a class=\"SelectItem\" href=\"javascript:void(0)\" onclick=\"searchBox.OnSelectItem(0)\"><span class=\"SelectionMark\">&#160;</span>All</a><a class=\"SelectItem\" href=\"javascript:void(0)\" onclick=\"searchBox.OnSelectItem(1)\"><span class=\"SelectionMark\">&#160;</span>Classes</a><a class=\"SelectItem\" href=\"javascript:void(0)\" onclick=\"searchBox.OnSelectItem(2)\"><span class=\"SelectionMark\">&#160;</span>Namespaces</a><a class=\"SelectItem\" href=\"javascript:void(0)\" onclick=\"searchBox.OnSelectItem(3)\"><span class=\"SelectionMark\">&#160;</span>Files</a><a class=\"SelectItem\" href=\"javascript:void(0)\" onclick=\"searchBox.OnSelectItem(4)\"><span class=\"SelectionMark\">&#160;</span>Functions</a><a class=\"SelectItem\" href=\"javascript:void(0)\" onclick=\"searchBox.OnSelectItem(5)\"><span class=\"SelectionMark\">&#160;</span>Variables</a></div>\n\n<!-- iframe showing the search results (closed by default) -->\n<div id=\"MSearchResultsWindow\">\n<iframe src=\"javascript:void(0)\" frameborder=\"0\" \n name=\"MSearchResults\" id=\"MSearchResults\">\n</iframe>\n</div>\n\n<div id=\"nav-path\" class=\"navpath\">\n <ul>\n<li class=\"navelem\"><a class=\"el\" href=\"dir_75b82e7e4a5feb05200b9ad7adf06257.html\">home</a></li><li class=\"navelem\"><a class=\"el\" href=\"dir_f398cca5446548e056949a7f74062791.html\">ogre</a></li><li class=\"navelem\"><a class=\"el\" href=\"dir_b089e5ac376fc367db6b27dc0e94437a.html\">Dropbox</a></li> </ul>\n</div>\n</div><!-- top -->\n<div class=\"header\">\n <div class=\"headertitle\">\n<div class=\"title\">Dropbox Directory Reference</div> </div>\n</div><!--header-->\n<div class=\"contents\">\n<table class=\"memberdecls\">\n<tr class=\"heading\"><td colspan=\"2\"><h2 class=\"groupheader\"><a name=\"subdirs\"></a>\nDirectories</h2></td></tr>\n<tr class=\"memitem:dir_52ccc41918f6ada6b954baadfc00f24a\"><td class=\"memItemLeft\" align=\"right\" valign=\"top\">directory &#160;</td><td class=\"memItemRight\" valign=\"bottom\"><a class=\"el\" href=\"dir_52ccc41918f6ada6b954baadfc00f24a.html\">Skole</a></td></tr></code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C++_>#include <atcoder/math>\n#include <bits/stdc++.h>\nusing namespace std;\nusing namespace atcoder;\ntypedef long long ll;\nint main() {\n\tint T;\n\tcin >> T;\n\tfor (int i = 0; i < T; ++ i) {\n\t\tint n, m, a, b;\n\t\tcin >> n >> m >> a >> b;\n\t\tcout << floor_sum(n, m, a, b) << endl;\n\t}\n}\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_>from heapq import*\ni=input\nfor s in[0]*int(i()):\n n,x,*y=int(i()),[]\n for _ in'_'*n:k,l,r=t=[*map(int,i().split())];x+=[t]*(l>r);y+=[[n-k,r,l]]*(l<=r)\n for x in x,y:\n x.sort();n,*h=len(x),\n while h or x:\n while[[n]]<x[-1:]:k,l,r=x.pop();heappush(h,(r-l,l,r))\n if h:s+=heappop(h)[2-(n>0)]\n n-=1\n print(s)</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_SQL_>\ndrop TABLE \"basic_auth\" ;\nCREATE TABLE \"basic_auth\" (\n\t \"username\"\t\t\t\tchar varying (200) not null primary key\n\t, \"salt\"\t\t\t\t\tchar varying (100) not null\n\t, \"password\"\t\t\t\tchar varying (180) not null \n);\ninsert into \"basic_auth\" ( \"username\", \"salt\", \"password\" ) values ( 'example.com:testme', 'salt', '4c205db6b361042ee973f0341433088922232dfb41d6b0721f8f91747bd0f71fc8ccefe250c3233c2c85a3e70e78d11cd98b8cf1d5f7a797f71dd2069a8fcc62' );\n\n\\q\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C_>#pragma once\n\n/// @file\n/// @brief Class template mbe::HandleBase\n\n#include <ostream>\n#include <limits>\n\n#include <MBE/Core/HandleID.h>\n\nnamespace mbe\n{\n\t/// @brief Creates an id for the derived object and provides functionality to access it through an id\n\t/// @see HandleID\n\ttemplate <class TDerived>\n\tclass HandleBase\n\t\t// Make doxygen ignore the abstract definition\n\t\t/// @cond\n\t\tabstract\n\t\t/// @endcond\n\t{\n\tpublic:\n\t\ttypedef HandleID<const TDerived> ConstID;\n\t\ttypedef HandleID<TDerived> ID;\n\n\tpublic:\n\t\t/// @brief Default constructor\n\t\tHandleBase();\n\n\t\t/// @brief Copy constructor\n\t\t/// @details A new handle id is generated for the copy\n\t\tHandleBase(const HandleBase& other);\n\n\t\t/// @brief Copy assignment operator\n\t\t/// @details A new handle id is generated for the copy\n\t\tHandleBase& operator= (const HandleBase& other);\n\n\t\t/// @brief Default destructor\n\t\tvirtual ~HandleBase();\n\n\tpublic:\n\t\t/// @brief Returns the id of this object\n\t\t/// @details Note that the ids will start from 0 for every derived type\n\t\tconst HandleID<TDerived>& GetHandleID();\n\n\t\t// Const overload (can't be a reference - The automatic conversion creates a new object)\n\n\t\t/// @brief Returns the const id of this object\n\t\t/// @details Const overload\n\t\tHandleID<const TDerived> GetHandleID() const;\n\n\t\t/// @brief Returns an id for which no object exists\n\t\t/// @details The null id will never be valid and contain a nullptr. Two null ids will be equal when compared.\n\t\t/// @returns An invalid id\n\t\tstatic const HandleID<TDerived> GetNullID();\n\n\tprivate:\n\t\t// Can't be const because of the constructor\n\t\t// But only a const reference is exposed\n\t\tHandleID<TDerived> id; // The id of this (handle) object\n\t};\n\n#pragma region Template Implementations\n\n\ttemplate<class TDerived>\n\tHandleBase<TDerived>::HandleBase()\n\t{\n\t\t// Should always be save\n\t\tconst auto& nextId = HandleID<TDerived>::NextHandleID(); // Underlying id\n\t\t// asserting the dynamic_cast does not work since no polimorph type is used\n\t\tHandleID<TDerived>::GetMap()[nextId] = static_cast<TDerived*>(this);\n\t\tid = { nextId }; // Must be after the id has been added to the map (So that the map lookup caches the correct pointer)\n\t}\n\n\ttemplate<class TDerived>\n\tinline HandleBase<TDerived>::HandleBase(const HandleBase& other)\n\t{\n\t\tconst auto& nextId = HandleID<TDerived>::NextHandleID(); // Underlying id\n\t\tHandleID<TDerived>::GetMap()[nextId] = static_cast<TDerived*>(this);\n\t\tid = { nextId };\n\t}\n\n\ttemplate<class TDerived>\n\tinline HandleBase<TDerived>& HandleBase<TDerived>::operator=(const HandleBase& other)\n\t{\n\t\t// Check for self assignment\n\t\tif (&other == this)\n\t\t\treturn *this;\n\n\t\tconst auto& nextId = HandleID<TDerived>::NextHandleID();\n\t\tHandleID<TDerived>::GetMap()[nextId] = static_cast<TDerived*>(this);\n\t\tthis->id = { nextId };\n\t\treturn *this;\n\t}\n\n\ttemplate<class TDerived>\n\tHandleBase<TDerived>::~HandleBase()\n\t{\n\t\tauto it = HandleID<TDerived>::GetMap().find(id.GetUnderlyingID());\n\t\tHandleID<TDerived>::GetMap().erase(it);\n\t}\n\n\ttemplate<class TDerived>\n\tinline const HandleID<TDerived>& HandleBase<TDerived>::GetHandleID()\n\t{\n\t\treturn id;\n\t}\n\n\ttemplate<class TDerived>\n\tinline HandleID<const TDerived> HandleBase<TDerived>::GetHandleID() const\n\t{\n\t\treturn id;\n\t}\n\n\ttemplate<class TDerived>\n\tinline 
const HandleID<TDerived> HandleBase<TDerived>::GetNullID()\n\t{\n\t\treturn { std::numeric_limits<HandleID<TDerived>::UnderlyingType>::max() };\n\t}\n\n#pragma endregion\n\n} // namespace mbe</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>from django.contrib import admin\nfrom django.contrib.auth import admin as auth_admin\nfrom django.contrib.auth import get_user_model\n\nfrom reprohack_hub.users.forms import UserChangeForm, UserCreationForm\n\nUser = get_user_model()\n\n\n@admin.register(User)\nclass UserAdmin(auth_admin.UserAdmin):\n\n form = UserChangeForm\n add_form = UserCreationForm\n fieldsets = ((\"User\", {\"fields\": (\"name\",)}),) + auth_admin.UserAdmin.fieldsets\n list_display = [\"username\", \"name\", \"is_superuser\"]\n search_fields = [\"name\"]\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Go_>//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Perl_>use lib 't/lib';\n\n$ENV{ES_VERSION} = '0_90';\n$ENV{ES_CXN} = 'LWP';\ndo \"es_sync_fork.pl\" or die( $@ || $! );\n\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>n=int(input())\na=list(map(int,input().split()))\nok=True\n\nv=[0 for i in range(n+2)]\nimport time\n\n#start=time.time()\nnextv=0\n\nnec=2**n\n\nfor i in range(n,-1,-1):\n\n if nec>=v[i+1]+a[i]:\n v[i]=v[i+1]+a[i]\n else:\n v[i]=nec\n\n nec=nec >>1\n\n\n\n#print(time.time()-start)\n\nfor i in range(1,n+1):\n if v[i-1]-a[i-1]<0:\n ok=False\n break\n if (v[i-1]-a[i-1])*2<v[i]:\n v[i]=(v[i-1]-a[i-1])*2\n\n\nif n==0:\n if a[0]!=1:ok=False\nelse:\n if a[0] != 0:ok = False\n\n\n\nans=sum(v)\nif ans==0:ok=False\n\nif ok:\n print(ans)\nelse:\n print(-1)</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_CSS_>@font-face {\n\tfont-family: 'Title';\n\tsrc: url('../assets/fonts/Title.ttf');\n}\n\n@font-face {\n\tfont-family: 'FiraCodeRetina';\n\tsrc: url('../assets/fonts/FiraCode-Retina.ttf');\n}\n\n@font-face {\n\tfont-family: 'Menu';\n\tsrc: url('../assets/fonts/Menu.otf');\n}\n\n@font-face {\n\tfont-family: 'RobotoThin';\n\tsrc: url('../assets/fonts/Roboto-Thin.ttf');\n}</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_>optimizer = dict(\n name='torch_optimizer',\n torch_optim_class='SGD',\n lr=0.06,\n momentum=0.9,\n weight_decay=1e-4\n)\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_>n=int(input())\nprint((n-1)*n//2)</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Kotlin_> @Provides\n fun provideLocalDataSourceGithub(dao: GithubDao) = GithubLocalDataSource(dao)\n\n @Singleton\n @Provides\n fun provideLocalDataSourceStackOverFlow(dao: StackOverFlowDao) = StackOverFlowLocalDataSource(dao)\n}</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Java_>import java.util.Scanner;\n\npublic class Main { \n public static void main(String[] args) {\n Scanner sc = new Scanner(System.in);\n int A = sc.nextInt();\n int B = sc.nextInt();\n int C = sc.nextInt();\n int X = sc.nextInt();\n int Y = sc.nextInt();\n int min =1000000000;\n for (int i =0; i<=Math.max(X,Y) * 2; i+=2) {\n int ans = i * C + Math.max(0,(X - i/2)) * A + Math.max(0,Y - i/2) * B;\n min = Math.min(ans,min);\n }\n System.out.println(min);\n }\n} </code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>import asyncio\nimport os.path\nimport time\nfrom typing import List\n\nimport aiohttp\n\nfrom mcsniperpy.util import announce, request_manager\nfrom mcsniperpy.util import utils as util\nfrom mcsniperpy.util.classes.config import BackConfig, Config, populate_configs</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Rust_>\nuse std::io::*;\nuse std::str::FromStr;\n\npub fn read<T: FromStr>() -> T {\n let stdin = stdin();\n let stdin = stdin.lock();\n let token: String = stdin\n .bytes()\n .map(|c| c.expect(\"failed to read char\") as char)\n .skip_while(|c| c.is_whitespace())\n .take_while(|c| !c.is_whitespace())\n .collect();\n token.parse().ok().expect(\"failed to parse token\")\n}\n\nfn main() {\n let n = read::<usize>();\n let m = read::<i32>();\n let mut pm: Vec<(i32, i32)> = vec![];\n let mut vec_t: Vec<Vec<i32>> = vec![vec![]; n];\n\n for _ in 0..m {\n let p = read::<i32>();\n let m = read::<i32>();\n pm.push((p, m));\n vec_t[(p - 1) as usize].push(m)\n }\n\n for i in 0..vec_t.len() {\n vec_t[i].sort();\n }\n\n for (p, m) in pm {\n let p_num: String = format!(\"{:0>keta$}\", p, keta = 6 as usize);\n let m_num: String = format!(\n \"{:0>keta$}\",\n vec_t[(p - 1) as usize]\n .iter()\n .enumerate()\n .find(|&x| x.1 == &m)\n .unwrap()\n .0\n + 1,\n keta = 6 as usize\n );\n\n println!(\"{}{}\", p_num, m_num);\n }\n}\n\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C#_>\n Assert.True(referenceEntry.IsLoaded);\n\n RecordLog();\n context.ChangeTracker.LazyLoadingEnabled = false;\n\n Assert.Equal(2, context.ChangeTracker.Entries().Count());\n\n var parent = context.ChangeTracker.Entries<Parent>().Single().Entity;\n\n Assert.Same(parent, single.Parent);\n Assert.Same(single, parent.SinglePkToPk);\n }\n }\n\n [ConditionalTheory]\n [InlineData(EntityState.Unchanged)]\n [InlineData(EntityState.Modified)]\n [InlineData(EntityState.Deleted)]\n public virtual void Lazy_load_one_to_one_PK_to_PK_reference_to_dependent_already_loaded(EntityState state)\n {\n using (var context = CreateContext(lazyLoadingEnabled: true))\n {\n var changeDetector = (ChangeDetectorProxy)context.GetService<IChangeDetector>();\n\n var parent = context.Set<Parent>().Include(e => e.SinglePkToPk).Single();\n\n ClearLog();\n\n var referenceEntry = context.Entry(parent).Reference(e => e.SinglePkToPk);\n\n context.Entry(parent).State = state;\n\n Assert.True(referenceEntry.IsLoaded);\n\n changeDetector.DetectChangesCalled = false;\n\n Assert.NotNull(parent.SinglePkToPk);\n\n Assert.False(changeDetector.DetectChangesCalled);\n\n Assert.True(referenceEntry.IsLoaded);\n\n RecordLog();\n context.ChangeTracker.LazyLoadingEnabled = false;\n\n Assert.Equal(2, context.ChangeTracker.Entries().Count());\n\n var single = context.ChangeTracker.Entries<SinglePkToPk>().Single().Entity;\n\n Assert.Same(single, parent.SinglePkToPk);\n Assert.Same(parent, single.Parent);\n }\n }\n\n [ConditionalTheory]\n [InlineData(EntityState.Unchanged)]\n [InlineData(EntityState.Modified)]\n [InlineData(EntityState.Deleted)]\n public virtual void Lazy_load_many_to_one_reference_to_principal_alternate_key(EntityState state)\n {\n using (var context = CreateContext(lazyLoadingEnabled: true))\n {\n var child = context.Set<ChildAk>().Single(e => e.Id == 32);\n\n ClearLog();\n\n var referenceEntry = context.Entry(child).Reference(e => e.Parent);\n\n context.Entry(child).State = state;\n\n Assert.False(referenceEntry.IsLoaded);\n\n Assert.NotNull(child.Parent);\n\n Assert.True(referenceEntry.IsLoaded);\n\n RecordLog();\n context.ChangeTracker.LazyLoadingEnabled = false;\n\n Assert.Equal(2, context.ChangeTracker.Entries().Count());\n\n var parent = context.ChangeTracker.Entries<Parent>().Single().Entity;\n\n Assert.Same(parent, child.Parent);\n Assert.Same(child, parent.ChildrenAk.Single());\n }\n }\n\n [ConditionalTheory]\n [InlineData(EntityState.Unchanged)]\n [InlineData(EntityState.Modified)]\n [InlineData(EntityState.Deleted)]\n public virtual void Lazy_load_one_to_one_reference_to_principal_alternate_key(EntityState state)\n {\n using (var context = CreateContext(lazyLoadingEnabled: true))\n {\n var single = context.Set<SingleAk>().Single();\n\n ClearLog();\n\n var referenceEntry = context.Entry(single).Reference(e => e.Parent);\n\n context.Entry(single).State = state;\n\n Assert.False(referenceEntry.IsLoaded);\n\n Assert.NotNull(single.Parent);\n\n Assert.True(referenceEntry.IsLoaded);\n\n RecordLog();\n context.ChangeTracker.LazyLoadingEnabled = false;\n\n Assert.Equal(2, context.ChangeTracker.Entries().Count());\n\n var parent = context.ChangeTracker.Entries<Parent>().Single().Entity;\n\n Assert.Same(parent, single.Parent);\n Assert.Same(single, parent.SingleAk);\n }\n }\n\n [ConditionalTheory]\n [InlineData(EntityState.Unchanged)]\n [InlineData(EntityState.Modified)]\n [InlineData(EntityState.Deleted)]\n public virtual void 
Lazy_load_one_to_one_reference_to_dependent_alternate_key(EntityState state)\n {\n using (var context = CreateContext(lazyLoadingEnabled: true))\n {\n var parent = context.Set<Parent>().Single();\n\n ClearLog();\n\n var referenceEntry = context.Entry(parent).Reference(e => e.SingleAk);\n\n context.Entry(parent).State = state;\n\n Assert.False(referenceEntry.IsLoaded);\n\n Assert.NotNull(parent.SingleAk);\n\n Assert.True(referenceEntry.IsLoaded);\n\n RecordLog();\n context.ChangeTracker.LazyLoadingEnabled = false;\n\n Assert.Equal(2, context.ChangeTracker.Entries().Count());\n\n var single = context.ChangeTracker.Entries<SingleAk>().Single().Entity;\n\n Assert.Same(single, parent.SingleAk);\n Assert.Same(parent, single.Parent);\n }\n }\n\n [ConditionalTheory]\n [InlineData(EntityState.Unchanged)]\n [InlineData(EntityState.Modified)]\n [InlineData(EntityState.Deleted)]\n public virtual void Lazy_load_many_to_one_reference_to_principal_null_FK_alternate_key(EntityState state)\n {\n using (var context = CreateContext(lazyLoadingEnabled: true))\n {\n var child = context.CreateProxy<ChildAk>();\n child.Id = 767;\n\n context.Attach(child);\n\n ClearLog();\n\n var referenceEntry = context.Entry(child).Reference(e => e.Parent);\n\n context.Entry(child).State = state;\n\n Assert.False(referenceEntry.IsLoaded);\n\n Assert.Null(child.Parent);\n\n Assert.True(referenceEntry.IsLoaded);\n\n RecordLog();\n context.ChangeTracker.LazyLoadingEnabled = false;\n\n Assert.Single(context.ChangeTracker.Entries());\n Assert.Null(child.Parent);\n }\n }\n\n [ConditionalTheory]\n [InlineData(EntityState.Unchanged)]\n [InlineData(EntityState.Modified)]\n [InlineData(EntityState.Deleted)]\n public virtual void Lazy_load_one_to_one_reference_to_principal_null_FK_alternate_key(EntityState state)\n {\n using (var context = CreateContext(lazyLoadingEnabled: true))\n {\n var single = context.CreateProxy<SingleAk>();\n single.Id = 767;\n\n context.Attach(single);\n\n ClearLog();\n\n var referenceEntry = context.Entry(single).Reference(e => e.Parent);\n\n context.Entry(single).State = state;\n\n Assert.False(referenceEntry.IsLoaded);\n\n Assert.Null(single.Parent);\n\n Assert.True(referenceEntry.IsLoaded);\n\n RecordLog();\n context.ChangeTracker.LazyLoadingEnabled = false;\n\n Assert.Single(context.ChangeTracker.Entries());\n\n Assert.Null(single.Parent);\n }\n }\n\n [ConditionalTheory]\n [InlineData(EntityState.Unchanged)]\n [InlineData(EntityState.Modified)]\n [InlineData(EntityState.Deleted)]\n public virtual void Lazy_load_collection_shadow_fk(EntityState state)\n {\n using (var context = CreateContext(lazyLoadingEnabled: true))\n {\n var parent = context.Set<Parent>().Single();\n\n ClearLog();\n\n var collectionEntry = context.Entry(parent).Collection(e => e.ChildrenShadowFk);\n\n context.Entry(parent).State = state;\n\n Assert.False(collectionEntry.IsLoaded);\n\n Assert.NotNull(parent.ChildrenShadowFk);\n\n Assert.True(collectionEntry.IsLoaded);\n\n RecordLog();\n context.ChangeTracker.LazyLoadingEnabled = false;\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_VisualBasic_>TYPE=VIEW\nquery=select if(isnull(`performance_schema`.`memory_summary_by_host_by_event_name`.`HOST`),\\'background\\',`performance_schema`.`memory_summary_by_host_by_event_name`.`HOST`) AS `host`,sum(`performance_schema`.`memory_summary_by_host_by_event_name`.`CURRENT_COUNT_USED`) AS `current_count_used`,`sys`.`format_bytes`(sum(`performance_schema`.`memory_summary_by_host_by_event_name`.`CURRENT_NUMBER_OF_BYTES_USED`)) AS `current_allocated`,`sys`.`format_bytes`(ifnull((sum(`performance_schema`.`memory_summary_by_host_by_event_name`.`CURRENT_NUMBER_OF_BYTES_USED`) / nullif(sum(`performance_schema`.`memory_summary_by_host_by_event_name`.`CURRENT_COUNT_USED`),0)),0)) AS `current_avg_alloc`,`sys`.`format_bytes`(max(`performance_schema`.`memory_summary_by_host_by_event_name`.`CURRENT_NUMBER_OF_BYTES_USED`)) AS `current_max_alloc`,`sys`.`format_bytes`(sum(`performance_schema`.`memory_summary_by_host_by_event_name`.`SUM_NUMBER_OF_BYTES_ALLOC`)) AS `total_allocated` from `performance_schema`.`memory_summary_by_host_by_event_name` group by if(isnull(`performance_schema`.`memory_summary_by_host_by_event_name`.`HOST`),\\'background\\',`performance_schema`.`memory_summary_by_host_by_event_name`.`HOST`) order by sum(`performance_schema`.`memory_summary_by_host_by_event_name`.`CURRENT_NUMBER_OF_BYTES_USED`) desc\nmd5=7cc67055b195611b3a0d3fc5e738eb81\nupdatable=0\nalgorithm=1\ndefiner_user=mysql.sys\ndefiner_host=localhost\nsuid=0\nwith_check_option=0\ntimestamp=2020-03-20 00:13:39\ncreate-version=1\nsource=SELECT IF(host IS NULL, \\'background\\', host) AS host, SUM(current_count_used) AS current_count_used, sys.format_bytes(SUM(current_number_of_bytes_used)) AS current_allocated, sys.format_bytes(IFNULL(SUM(current_number_of_bytes_used) / NULLIF(SUM(current_count_used), 0), 0)) AS current_avg_alloc, sys.format_bytes(MAX(current_number_of_bytes_used)) AS current_max_alloc, sys.format_bytes(SUM(sum_number_of_bytes_alloc)) AS total_allocated FROM performance_schema.memory_summary_by_host_by_event_name GROUP BY IF(host IS NULL, \\'background\\', host) ORDER BY SUM(current_number_of_bytes_used) DESC\nclient_cs_name=utf8\nconnection_cl_name=utf8_general_ci</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Scala_>package typingsSlinky.reactNavigationNative\n\nimport org.scalablytyped.runtime.Shortcut\nimport typingsSlinky.react.mod.Context\nimport typingsSlinky.reactNavigationNative.anon.Options\nimport org.scalablytyped.runtime.StObject\nimport scala.scalajs.js\nimport scala.scalajs.js.`|`\nimport scala.scalajs.js.annotation.{JSGlobalScope, JSGlobal, JSImport, JSName, JSBracketAccess}\n\nobject linkingContextMod extends Shortcut {\n \n @JSImport(\"@react-navigation/native/lib/typescript/src/LinkingContext\", JSImport.Default)\n @js.native\n val default: Context[Options] = js.native\n \n type _To = Context[Options]\n \n /* This means you don't have to write `default`, but can instead just say `linkingContextMod.foo` */\n override def _to: Context[Options] = default\n}\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_PHP_>class CustomerService\n{\n /**\n * @var CustomerRepository\n */\n private $customerRepository;\n\n public function __construct(CustomerRepository $customerRepository)\n {\n $this->customerRepository = $customerRepository;\n }\n\n /**\n * @return array\n */\n public function getAllCustomers(): array\n {\n return $this->customerRepository->findAll();\n }\n\n /**\n * @param $customerId\n * @return array\n */\n public function getCustomer($customerId) : array\n {\n return $this->customerRepository->customerOfId($customerId);\n }\n}\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_JavaScript_>\nexport default () => (\n <div>\n <Sidebar />\n <div>Hello world!</div>\n </div>\n)</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>from keras.models import *\r\nfrom keras.layers import *\r\nfrom keras.optimizers import *\r\nfrom keras.callbacks import ModelCheckpoint, LearningRateScheduler\r\nfrom keras import backend as keras\r\nimport tensorflow as tf\r\nfrom keras import initializers\r\nfrom keras import regularizers\r\n\r\n#PET\r</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_CSS_>@import \"logo.css\";\n@import \"list.css\";\n@import \"menu.css\";\n\n.sidebar {\n display: flex;\n flex-direction: column;\n gap: 1.5rem;\n inline-size: 12.5rem;\n padding-block-start: 1.5rem;\n padding-block-end: 1.5rem;\n background-color: var(--gray-60);\n}</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_># Copyright 2021 Chaos Mesh Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\"\"\"\nvaraibles used commonly in multiple scripts\n\"\"\"\n\nexport_env_variables = [\n \"HTTP_PROXY\",\n \"HTTPS_PROXY\",\n \"GOPROXY\",\n \"UI\",\n \"LDFLAGS\",\n \"CRATES_MIRROR\",\n \"GO_BUILD_CACHE\",\n \"YARN_BUILD_CACHE\",\n]\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Scala_>\n// Expressions Vs Statements\n\n// A statement is the smallest element of a language\n// that expresses some action to be carried out.\nval foo = 5\n\n// An expression is a combination of GTE one values,\n// constants, variables and functions that a language\n// interprets to *compute a value*.\nval bar = if (10 % 2 == 0) \"even\" else \"odd\"\n// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ \n// This bit above here is an expression!\n\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Java_>/*\n This file is licensed to You under the Apache License, Version 2.0\n (the \"License\"); you may not use this file except in compliance with\n the License. You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n*/\npackage org.xmlunit.diff;\n\nimport java.util.Locale;\n\n/**\n * The kinds of comparisons XMLUnit performs.\n */\npublic enum ComparisonType {\n /**\n * Do both documents specify the same version in their XML declaration?\n */\n XML_VERSION,\n /**\n * Do both documents specify the same standalone declaration in\n * their XML declaration?\n */\n XML_STANDALONE,\n /**\n * Do both documents specify the same encoding in their XML declaration?\n */\n XML_ENCODING,\n /**\n * Do both documents have a DOCTYPE (or neither of each)?\n */\n HAS_DOCTYPE_DECLARATION(true),\n /**\n * If the documents both have DOCTYPEs, compare the names.\n */\n DOCTYPE_NAME(true),\n /**\n * If the documents both have DOCTYPEs, compare the PUBLIC\n * identifiers.\n */\n DOCTYPE_PUBLIC_ID(true),\n /**\n * If the documents both have DOCTYPEs, compare the SYSTEM\n * identifiers.\n */\n DOCTYPE_SYSTEM_ID(true),\n\n /**\n * Check whether both documents provide the same values for\n * xsi:schemaLocation (may even be null).\n */\n SCHEMA_LOCATION,\n /**\n * Check whether both documents provide the same values for\n * xsi:noNamspaceSchemaLocation (may even be null).\n */\n NO_NAMESPACE_SCHEMA_LOCATION,\n\n /**\n * Compare the node types.\n */\n NODE_TYPE,\n\n /**\n * Compare the node's namespace prefixes.\n */\n NAMESPACE_PREFIX,\n /**\n * Compare the node's namespace URIs.\n */\n NAMESPACE_URI,\n\n /**\n * Compare content of text nodes, comments and CDATA sections.\n */\n TEXT_VALUE,\n\n /**\n * Compare targets of processing instructions.\n */\n PROCESSING_INSTRUCTION_TARGET,\n /**\n * Compare data of processing instructions.\n */\n PROCESSING_INSTRUCTION_DATA,\n\n /**\n * Compare element names.\n */\n ELEMENT_TAG_NAME,\n /**\n * Compare explicit/implicit status of attributes.\n */\n ATTR_VALUE_EXPLICITLY_SPECIFIED(\"attribute value explicitly specified\"),\n /**\n * Compare number of attributes.\n */\n ELEMENT_NUM_ATTRIBUTES(\"number of attributes\"),\n /**\n * Compare attribute's value.\n */\n ATTR_VALUE(\"attribute value\"),\n /**\n * Compare number of child nodes.\n */\n CHILD_NODELIST_LENGTH,\n /**\n * Compare order of child nodes.\n */\n CHILD_NODELIST_SEQUENCE,\n\n /**\n * Search for a child node matching a specific child node of the\n * other node.\n */\n CHILD_LOOKUP(\"child\"),\n /**\n * Search for an attribute with a name matching a specific\n * attribute of the other node.\n */\n ATTR_NAME_LOOKUP(\"attribute name\");\n\n private final String description;\n private final boolean doctypeComparison;\n\n private ComparisonType() {\n this(false);\n }\n\n private ComparisonType(boolean doctypeComparison) {\n this(null, doctypeComparison);\n }\n\n private ComparisonType(String description) {\n this(description, false);\n }\n\n private ComparisonType(String description, boolean doctypeComparison) {\n this.description = description;\n this.doctypeComparison = doctypeComparison;\n }\n\n public String 
getDescription() {\n if (description == null) {\n return name().toLowerCase(Locale.ENGLISH).replace('_', ' ');\n }\n return description;\n }\n\n boolean isDoctypeComparison() {\n return doctypeComparison;\n }\n}\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C++_>// This file has been generated by Py++.\n\n#include \"boost/python.hpp\"\n#include \"wrap_osg.h\"\n#include \"wrap_referenced.h\"\n#include \"shape.pypp.hpp\"\n\nnamespace bp = boost::python;\n\nstruct Shape_wrapper : osg::Shape, bp::wrapper< osg::Shape > {\n\n virtual void accept( ::osg::ShapeVisitor & arg0 ){\n bp::override func_accept = this->get_override( \"accept\" );\n func_accept( boost::ref(arg0) );\n }\n\n virtual void accept( ::osg::ConstShapeVisitor & arg0 ) const {\n bp::override func_accept = this->get_override( \"accept\" );\n func_accept( boost::ref(arg0) );\n }\n\n virtual char const * className( ) const {\n if( bp::override func_className = this->get_override( \"className\" ) )\n return func_className( );\n else{\n return this->osg::Shape::className( );\n }\n }\n \n char const * default_className( ) const {\n return osg::Shape::className( );\n }\n\n virtual ::osg::Object * clone( ::osg::CopyOp const & arg0 ) const {\n bp::override func_clone = this->get_override( \"clone\" );\n return func_clone( boost::ref(arg0) );\n }\n\n virtual ::osg::Object * cloneType( ) const {\n bp::override func_cloneType = this->get_override( \"cloneType\" );\n return func_cloneType( );\n }\n\n virtual bool isSameKindAs( ::osg::Object const * obj ) const {\n if( bp::override func_isSameKindAs = this->get_override( \"isSameKindAs\" ) )\n return func_isSameKindAs( boost::python::ptr(obj) );\n else{\n return this->osg::Shape::isSameKindAs( boost::python::ptr(obj) );\n }\n }\n \n bool default_isSameKindAs( ::osg::Object const * obj ) const {\n return osg::Shape::isSameKindAs( boost::python::ptr(obj) );\n }\n\n virtual char const * libraryName( ) const {\n if( bp::override func_libraryName = this->get_override( \"libraryName\" ) )\n return func_libraryName( );\n else{\n return this->osg::Shape::libraryName( );\n }\n }\n \n char const * default_libraryName( ) const {\n return osg::Shape::libraryName( );\n }\n\n virtual void computeDataVariance( ) {\n if( bp::override func_computeDataVariance = this->get_override( \"computeDataVariance\" ) )\n func_computeDataVariance( );\n else{\n this->osg::Object::computeDataVariance( );\n }\n }\n \n void default_computeDataVariance( ) {\n osg::Object::computeDataVariance( );\n }\n\n virtual ::osg::Referenced * getUserData( ) {\n if( bp::override func_getUserData = this->get_override( \"getUserData\" ) )\n return func_getUserData( );\n else{\n return this->osg::Object::getUserData( );\n }\n }\n \n ::osg::Referenced * default_getUserData( ) {\n return osg::Object::getUserData( );\n }\n\n virtual ::osg::Referenced const * getUserData( ) const {\n if( bp::override func_getUserData = this->get_override( \"getUserData\" ) )\n return func_getUserData( );\n else{\n return this->osg::Object::getUserData( );\n }\n }\n \n ::osg::Referenced const * default_getUserData( ) const {\n return osg::Object::getUserData( );\n }\n\n virtual void resizeGLObjectBuffers( unsigned int arg0 ) {\n if( bp::override func_resizeGLObjectBuffers = this->get_override( \"resizeGLObjectBuffers\" ) )\n func_resizeGLObjectBuffers( arg0 );\n else{\n this->osg::Object::resizeGLObjectBuffers( arg0 );\n }\n }\n \n void default_resizeGLObjectBuffers( unsigned int arg0 ) {\n osg::Object::resizeGLObjectBuffers( arg0 );\n }\n\n virtual void setName( ::std::string const & name ) {\n if( bp::override func_setName = this->get_override( \"setName\" ) )\n func_setName( name );\n else{\n this->osg::Object::setName( name );\n }\n }\n \n void 
default_setName( ::std::string const & name ) {\n osg::Object::setName( name );\n }\n\n virtual void setThreadSafeRefUnref( bool threadSafe ) {\n if( bp::override func_setThreadSafeRefUnref = this->get_override( \"setThreadSafeRefUnref\" ) )\n func_setThreadSafeRefUnref( threadSafe );\n else{\n this->osg::Object::setThreadSafeRefUnref( threadSafe );\n }\n }\n \n void default_setThreadSafeRefUnref( bool threadSafe ) {\n osg::Object::setThreadSafeRefUnref( threadSafe );\n }\n\n virtual void setUserData( ::osg::Referenced * obj ) {\n if( bp::override func_setUserData = this->get_override( \"setUserData\" ) )\n func_setUserData( boost::python::ptr(obj) );\n else{\n this->osg::Object::setUserData( boost::python::ptr(obj) );\n }\n }\n \n void default_setUserData( ::osg::Referenced * obj ) {\n osg::Object::setUserData( boost::python::ptr(obj) );\n }\n\n};\n\nvoid register_Shape_class(){\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_SQL_> `total_refine_count` INTEGER DEFAULT(NULL) NOT NULL,\r\n `refine_date` timestamp DEFAULT(NULL) NOT NULL\r\n)\r\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Rust_>#[doc = \"Register `se_gmac_0_lca` reader\"]\npub struct R(crate::R<SE_GMAC_0_LCA_SPEC>);\nimpl core::ops::Deref for R {\n type Target = crate::R<SE_GMAC_0_LCA_SPEC>;\n #[inline(always)]\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\nimpl core::convert::From<crate::R<SE_GMAC_0_LCA_SPEC>> for R {\n fn from(reader: crate::R<SE_GMAC_0_LCA_SPEC>) -> Self {\n R(reader)\n }\n}\n#[doc = \"Register `se_gmac_0_lca` writer\"]\npub struct W(crate::W<SE_GMAC_0_LCA_SPEC>);\nimpl core::ops::Deref for W {\n type Target = crate::W<SE_GMAC_0_LCA_SPEC>;\n #[inline(always)]\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\nimpl core::ops::DerefMut for W {\n #[inline(always)]\n fn deref_mut(&mut self) -> &mut Self::Target {\n &mut self.0\n }\n}\nimpl core::convert::From<crate::W<SE_GMAC_0_LCA_SPEC>> for W {\n fn from(writer: crate::W<SE_GMAC_0_LCA_SPEC>) -> Self {\n W(writer)\n }\n}\n#[doc = \"Field `se_gmac_0_lca` reader - \"]\npub struct SE_GMAC_0_LCA_R(crate::FieldReader<u32, u32>);\nimpl SE_GMAC_0_LCA_R {\n pub(crate) fn new(bits: u32) -> Self {\n SE_GMAC_0_LCA_R(crate::FieldReader::new(bits))\n }\n}\nimpl core::ops::Deref for SE_GMAC_0_LCA_R {\n type Target = crate::FieldReader<u32, u32>;\n #[inline(always)]\n fn deref(&self) -> &Self::Target {\n &self.0\n }\n}\n#[doc = \"Field `se_gmac_0_lca` writer - \"]\npub struct SE_GMAC_0_LCA_W<'a> {\n w: &'a mut W,\n}\nimpl<'a> SE_GMAC_0_LCA_W<'a> {\n #[doc = r\"Writes raw bits to the field\"]\n #[inline(always)]\n pub unsafe fn bits(self, value: u32) -> &'a mut W {\n self.w.bits = (self.w.bits & !0xffff_ffff) | ((value as u32) & 0xffff_ffff);\n self.w\n }\n}\nimpl R {\n #[doc = \"Bits 0:31\"]\n #[inline(always)]\n pub fn se_gmac_0_lca(&self) -> SE_GMAC_0_LCA_R {\n SE_GMAC_0_LCA_R::new((self.bits & 0xffff_ffff) as u32)\n }\n}\nimpl W {\n #[doc = \"Bits 0:31\"]\n #[inline(always)]\n pub fn se_gmac_0_lca(&mut self) -> SE_GMAC_0_LCA_W {\n SE_GMAC_0_LCA_W { w: self }\n }\n #[doc = \"Writes raw bits to the register.\"]</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_>from django.db import migrations\n\n\ndef create_site(apps, schema_editor):\n Site = apps.get_model(\"sites\", \"Site\")\n custom_domain = \"fbs-plus-32496.botics.co\"\n\n site_params = {\n \"name\": \"FBS Plus\",\n }\n if custom_domain:\n site_params[\"domain\"] = custom_domain\n\n Site.objects.update_or_create(defaults=site_params, id=1)\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n (\"sites\", \"0002_alter_domain_unique\"),\n ]\n\n operations = [\n migrations.RunPython(create_site),\n ]\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Go_>// Package sessions handles the storage, management, and validation\n// of pomerium user sessions.\npackage sessions // import \"github.com/pomerium/pomerium/internal/sessions\"\n\nimport (\n\t\"net/http\"\n)\n\n// SessionStore defines an interface for loading, saving, and clearing a session.\ntype SessionStore interface {\n\tSessionLoader\n\tClearSession(http.ResponseWriter, *http.Request)\n\tSaveSession(http.ResponseWriter, *http.Request, interface{}) error\n}\n\n// SessionLoader defines an interface for loading a session.\ntype SessionLoader interface {\n\tLoadSession(*http.Request) (*State, error)\n}\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_SQL_>--@Autor: FLores Fuentes Kevin y Torres Verastegui Jose Antonio\n--@Fecha creación: 19/06/2020\n--@Descripción: Carga inicial de datos\n\nwhenever sqlerror exit rollback;\n@s-15-fx-genera-num-serie.sql\n@s-15-fx-genera-num-notif.sql\n@s-15-fx-genera-folio-pago.sql\nprompt Cargando entidades ...\n@s-09-carga-entidad.sql\nprompt Cargando placas ...\n@s-09-carga-placa.sql\nprompt Cargando marcas ...\n@s-09-carga-marca.sql\nprompt Cargando modelos ...\n@s-09-carga-modelo.sql\nprompt Cargando tipos de licencia ...\n@s-09-carga-tipo-licencia.sql\nprompt Cargando propietarios ...\n@s-09-carga-propietario.sql\nprompt Cargando status vehiculo ...\n@s-09-carga-status-vehiculo.sql\nprompt Cargando vehiculos ...\n@s-09-carga-vehiculo.sql\nprompt Cargando licencias ...\n@s-09-carga-licencia.sql\nprompt Cargando mediciones ...\n@s-09-carga-mediciones.sql\nprompt Cargando notificaciones ...\n@s-09-carga-notificacion.sql\nprompt Cargando pagos ...\n@s-09-carga-pago.sql\nprompt Cargando verificaciones ...\n@s-09-carga-verificacion.sql\nprompt Actualizando fechas de status\n@s-09-carga-fecha-status.sql\ncommit;\nprompt Listo! Carga inicial completa\n\nwhenever sqlerror continue;</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_CMake_># Copyright (c) 2017-2018 Intel Corporation\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n#\r\n#\r\n#\r\n#\r\n\r\n# TBB_FOUND should not be set explicitly. It is defined automatically by CMake.\r\n# Handling of TBB_VERSION is in TBBConfigVersion.cmake.\r\n\r\nif (NOT TBB_FIND_COMPONENTS)\r\n set(TBB_FIND_COMPONENTS \"tbb;tbbmalloc;tbbmalloc_proxy\")\r</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>n = input()\nseven = False\nfor s in n:\n if s == '7':\n seven = True\n break\n\nif seven:\n print('Yes')\nelse:\n print('No')</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C++_>#ifndef BOOST_ARCHIVE_ITERATORS_MB_FROM_WCHAR_HPP\n#define BOOST_ARCHIVE_ITERATORS_MB_FROM_WCHAR_HPP\n\n// MS compatible compilers support #pragma once\n#if defined(_MSC_VER)\n# pragma once\n#endif\n\n/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8\n// mb_from_wchar.hpp\n\n// (C) Copyright 2002 Robert Ramey - http://www.rrsd.com . \n// Use, modification and distribution is subject to the Boost Software\n// License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at\n// http://www.boost.org/LICENSE_1_0.txt)\n\n// See http://www.boost.org for updates, documentation, and revision history.\n\n#include <boost/assert.hpp>\n#include <cstddef> // size_t\n#include <cwchar> // mbstate_t\n\n#include <boost/config.hpp>\n#if defined(BOOST_NO_STDC_NAMESPACE)\nnamespace std{ \n using ::mbstate_t;\n} // namespace std\n#endif\n#include <boost/archive/detail/utf8_codecvt_facet.hpp>\n#include <boost/iterator/iterator_adaptor.hpp>\n\nnamespace boost { \nnamespace archive {\nnamespace iterators {\n\n/////////1/////////2/////////3/////////4/////////5/////////6/////////7/////////8\n// class used by text archives to translate wide strings and to char\n// strings of the currently selected locale\ntemplate<class Base> // the input iterator\nclass mb_from_wchar\n : public boost::iterator_adaptor<\n mb_from_wchar<Base>, \n Base, \n wchar_t,\n single_pass_traversal_tag,\n char\n >\n{\n friend class boost::iterator_core_access;\n\n typedef typename boost::iterator_adaptor<\n mb_from_wchar<Base>, \n Base, \n wchar_t,\n single_pass_traversal_tag,\n char\n > super_t;\n\n typedef mb_from_wchar<Base> this_t;\n\n char dereference_impl() {\n if(! m_full){\n fill();\n m_full = true;\n }\n return m_buffer[m_bnext];\n }\n\n char dereference() const {\n return (const_cast<this_t *>(this))->dereference_impl();\n }\n // test for iterator equality\n bool equal(const mb_from_wchar<Base> & rhs) const {</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C_>/****************************************************************************\n**\n** Copyright (C) 2011 Richard J. Moore <rich@kde.org>\n** Contact: https://www.qt.io/licensing/\n**\n** This file is part of the QtNetwork module of the Qt Toolkit.\n**\n** $QT_BEGIN_LICENSE:LGPL$\n** Commercial License Usage\n** Licensees holding valid commercial Qt licenses may use this file in\n** accordance with the commercial license agreement provided with the\n** Software or, alternatively, in accordance with the terms contained in\n** a written agreement between you and The Qt Company. For licensing terms\n** and conditions see https://www.qt.io/terms-conditions. For further\n** information use the contact form at https://www.qt.io/contact-us.\n**\n** GNU Lesser General Public License Usage\n** Alternatively, this file may be used under the terms of the GNU Lesser\n** General Public License version 3 as published by the Free Software\n** Foundation and appearing in the file LICENSE.LGPL3 included in the\n** packaging of this file. Please review the following information to\n** ensure the GNU Lesser General Public License version 3 requirements\n** will be met: https://www.gnu.org/licenses/lgpl-3.0.html.\n**\n** GNU General Public License Usage\n** Alternatively, this file may be used under the terms of the GNU\n** General Public License version 2.0 or (at your option) the GNU General\n** Public license version 3 or any later version approved by the KDE Free\n** Qt Foundation. The licenses are as published by the Free Software\n** Foundation and appearing in the file LICENSE.GPL2 and LICENSE.GPL3\n** included in the packaging of this file. Please review the following\n** information to ensure the GNU General Public License requirements will\n** be met: https://www.gnu.org/licenses/gpl-2.0.html and\n** https://www.gnu.org/licenses/gpl-3.0.html.\n**\n** $QT_END_LICENSE$\n**\n****************************************************************************/\n\n#ifndef QSSLCERTIFICATEEXTENSION_H\n#define QSSLCERTIFICATEEXTENSION_H\n\n#include <QtCore/qnamespace.h>\n#include <QtCore/qshareddata.h>\n#include <QtCore/qstring.h>\n#include <QtCore/qvariant.h>\n\nQT_BEGIN_NAMESPACE\n\n\n#ifndef QT_NO_SSL\n\nclass QSslCertificateExtensionPrivate;\n\nclass Q_NETWORK_EXPORT QSslCertificateExtension\n{\npublic:\n QSslCertificateExtension();\n QSslCertificateExtension(const QSslCertificateExtension &other);\n#ifdef Q_COMPILER_RVALUE_REFS\n QSslCertificateExtension &operator=(QSslCertificateExtension &&other) Q_DECL_NOTHROW { swap(other); return *this; }\n#endif\n QSslCertificateExtension &operator=(const QSslCertificateExtension &other);\n ~QSslCertificateExtension();\n\n void swap(QSslCertificateExtension &other) Q_DECL_NOTHROW { qSwap(d, other.d); }\n\n QString oid() const;\n QString name() const;\n QVariant value() const;\n bool isCritical() const;\n\n bool isSupported() const;\n\nprivate:\n friend class QSslCertificatePrivate;\n QSharedDataPointer<QSslCertificateExtensionPrivate> d;\n};\n\nQ_DECLARE_SHARED(QSslCertificateExtension)\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_TypeScript_>/* eslint-disable no-console */\nimport Attester from '../../../src/attestation/Attester'\nimport Claimer from '../../../src/claim/Claimer'\nimport Accumulator from '../../../src/attestation/Accumulator'\nimport { Witness } from '../../../src/types/Attestation'\nimport Credential from '../../../src/claim/Credential'\n\nfunction compareClaims(claim: any, claimFromAtt: any): void {\n const [keys1, values1] = Object.entries(claim)\n const [keys2, values2] = Object.entries(claimFromAtt)\n const checkKeys = keys1.filter((key) => keys2.includes(key)).length === 0\n const checkValues =\n values1.filter((val) => values2.includes(val)).length === 0\n if (!checkKeys || !checkValues) {\n console.error('Original claim and claim from attestation do not match!')\n }\n}\n\n// runs a complete issuance process for a claim\nexport async function issuanceProcess({\n attester,\n claimer,\n accumulator,\n claim,\n}: {\n attester: Attester\n claimer: Claimer\n accumulator: Accumulator\n claim: Record<string, any>\n}): Promise<{\n credential: Credential\n witness: Witness\n}> {\n // attester needs to create nonce and context before each attestation\n const {\n message: startAttestationMsg,\n session: attestationSession,\n } = await attester.startAttestation()\n\n // claimer commits to nonce and requests attestation\n const {\n message: attestationRequest,\n session: claimerSession,\n } = await claimer.requestAttestation({\n startAttestationMsg,\n claim,\n attesterPubKey: attester.publicKey,\n })\n\n // the attester might want to inspect the attributes he is about to sign\n const checkClaim = attestationRequest.getClaim()\n compareClaims(claim, checkClaim)\n\n // attester issues credential\n const { attestation, witness } = await attester.issueAttestation({\n attestationSession,\n attestationRequest,\n accumulator,\n })\n\n // claimer builds credential from attestation and claimer session\n const credential = await claimer.buildCredential({\n claimerSession,\n attestation,\n })\n return { credential, witness }\n}\nexport default issuanceProcess\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>https://docs.djangoproject.com/en/stable/howto/deployment/asgi/\n\"\"\"\n\nimport os\n\nos.environ.setdefault(\"DJANGO_CONFIGURATION\", \"Local\")\nos.environ.setdefault(\n \"DJANGO_SETTINGS_MODULE\", \"{{cookiecutter.project_slug}}.settings\"\n)\n\nfrom configurations.asgi import get_asgi_application # noqa isort:skip\n\napplication = get_asgi_application()\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Java_>\t\t\t\t.map(path -> new ResourceLocation(resourceNamespace, Joiner.on('/')\n\t\t\t\t\t.join(path)))\n\t\t\t\t.collect(Collectors.toList());\n\n\t\t\treturn resources;\n\n\t\t} catch (IOException e) {\n\t\t\treturn Collections.emptyList();\n\t\t}\n\t}\n\n\t@Override\n\tpublic Set<String> getNamespaces(ResourcePackType type) {\n\t\tif (type == ResourcePackType.CLIENT_RESOURCES)\n\t\t\treturn Collections.emptySet();\n\n\t\ttry {\n\t\t\tPath root = getResourcePath(type.getDirectory()).toAbsolutePath();\n\t\t\tSet<String> namespaces = Files.walk(root, 1)\n\t\t\t\t.map(path -> root.relativize(path.toAbsolutePath()))\n\t\t\t\t.filter(path -> path.getNameCount() > 0)\n\t\t\t\t.map(p -> p.toString()\n\t\t\t\t\t.replaceAll(\"/$\", \"\"))\n\t\t\t\t.filter(s -> !s.isEmpty())\n\t\t\t\t.collect(Collectors.toSet());\n\n\t\t\treturn namespaces;\n\n\t\t} catch (IOException e) {\n\t\t\treturn Collections.emptySet();\n\t\t}\n\t}\n\n}\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C_>// Licensed under the Apache License, Version 2.0 (the \"License\");\n// you may not use this file except in compliance with the License.\n// You may obtain a copy of the License at\n//\n// http://www.apache.org/licenses/LICENSE-2.0\n//\n// Unless required by applicable law or agreed to in writing, software\n// distributed under the License is distributed on an \"AS IS\" BASIS,\n// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n// See the License for the specific language governing permissions and\n// limitations under the License.\n\n#ifndef MACE_OPS_COMMON_CONV_POOL_2D_UTIL_H_\n#define MACE_OPS_COMMON_CONV_POOL_2D_UTIL_H_\n\n#include \"mace/core/tensor.h\"\n\nnamespace mace {\n\nenum Padding {\n VALID = 0, // No padding\n SAME = 1, // Pads with half the filter size (rounded down) on both sides</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom collections import defaultdict\nimport os.path\nimport configparser\nimport argparse\nimport json\nimport sys\nfrom googleapiclient import discovery\n\n\n\ndef list_instances(project='silken-period-262510', zone='europe-west1-b', use_gcloud_config=True, use_cache=False,\n cache_source='dynamic_inventory.cache', update_cache=False,\n cache_destination='dynamic_inventory.cache'):\n # Retrieving mycfg configuration from gcloud SDK</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C_> * - ESP_ERR_TCPIP_ADAPTER_INVALID_PARAMS\n */\nesp_err_t tcpip_adapter_create_ip6_linklocal(tcpip_adapter_if_t tcpip_if);\n\n/**\n * @brief Get interface link-local IPv6 address\n *\n * If the specified interface is up and a preferred link-local IPv6 address\n * has been created for the interface, return a copy of it.\n *\n * @param[in] tcpip_if Interface to get link-local IPv6 address\n * @param[out] if_ip6 IPv6 information will be returned in this argument if successful.\n *\n * @return\n * - ESP_OK\n * - ESP_FAIL If interface is down, does not have a link-local IPv6 address, or the link-local IPv6 address is not a preferred address.\n */\nesp_err_t tcpip_adapter_get_ip6_linklocal(tcpip_adapter_if_t tcpip_if, ip6_addr_t *if_ip6);\n\n#if 0\nesp_err_t tcpip_adapter_get_mac(tcpip_adapter_if_t tcpip_if, uint8_t *mac);\n\nesp_err_t tcpip_adapter_set_mac(tcpip_adapter_if_t tcpip_if, uint8_t *mac);\n#endif\n\n/**\n * @brief Get DHCP Server status\n *\n * @param[in] tcpip_if Interface to get status of DHCP server.\n * @param[out] status If successful, the status of the DHCP server will be returned in this argument.\n *\n * @return\n * - ESP_OK\n */\nesp_err_t tcpip_adapter_dhcps_get_status(tcpip_adapter_if_t tcpip_if, tcpip_adapter_dhcp_status_t *status);\n\n/**\n * @brief Set or Get DHCP server option\n *\n * @param[in] opt_op TCPIP_ADAPTER_OP_SET to set an option, TCPIP_ADAPTER_OP_GET to get an option.\n * @param[in] opt_id Option index to get or set, must be one of the supported enum values.\n * @param[inout] opt_val Pointer to the option parameter.\n * @param[in] opt_len Length of the option parameter.\n *\n * @return\n * - ESP_OK\n * - ESP_ERR_TCPIP_ADAPTER_INVALID_PARAMS\n * - ESP_ERR_TCPIP_ADAPTER_DHCP_ALREADY_STOPPED\n * - ESP_ERR_TCPIP_ADAPTER_DHCP_ALREADY_STARTED\n */\nesp_err_t tcpip_adapter_dhcps_option(tcpip_adapter_dhcp_option_mode_t opt_op, tcpip_adapter_dhcp_option_id_t opt_id, void *opt_val, uint32_t opt_len);\n\n/**\n * @brief Start DHCP server\n *\n * @note Currently DHCP server is only supported on the Wi-Fi AP interface.\n *\n * @param[in] tcpip_if Interface to start DHCP server. Must be TCPIP_ADAPTER_IF_AP.\n *\n * @return\n * - ESP_OK\n * - ESP_ERR_TCPIP_ADAPTER_INVALID_PARAMS\n * - ESP_ERR_TCPIP_ADAPTER_DHCP_ALREADY_STARTED\n */\nesp_err_t tcpip_adapter_dhcps_start(tcpip_adapter_if_t tcpip_if);\n\n/**\n * @brief Stop DHCP server\n *\n * @note Currently DHCP server is only supported on the Wi-Fi AP interface.\n *\n * @param[in] tcpip_if Interface to stop DHCP server. Must be TCPIP_ADAPTER_IF_AP.\n *\n * @return\n * - ESP_OK\n * - ESP_ERR_TCPIP_ADAPTER_INVALID_PARAMS\n * - ESP_ERR_TCPIP_ADAPTER_DHCP_ALREADY_STOPPED\n * - ESP_ERR_TCPIP_ADAPTER_IF_NOT_READY\n */\nesp_err_t tcpip_adapter_dhcps_stop(tcpip_adapter_if_t tcpip_if);\n\n/**\n * @brief Get DHCP client status\n *</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Perl_>=head1 LICENSE\n\nCopyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute\nCopyright [2016-2022] EMBL-European Bioinformatics Institute\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n\n=cut\n\npackage EnsEMBL::Web::Component::StructuralVariation::PopulationFrequency;\n\nuse strict;\nuse Bio::EnsEMBL::Variation::Utils::Constants qw(%VARIATION_CLASSES);\n\nuse base qw(EnsEMBL::Web::Component::StructuralVariation);\n\nsub _init {\n my $self = shift;\n $self->cacheable(0);\n $self->ajaxable(1);\n}\n\nsub content {\n my $self = shift;\n my $object = $self->object;\n my $sv_object = $object->Obj;\n \n my $html;\n \n my $svpf_adaptor = $sv_object->adaptor->db->get_StructuralVariationPopulationFrequency;\n\n my $frequency_data = $svpf_adaptor->fetch_all_by_StructuralVariation($sv_object);\n\n my $table_rows = $self->table_data($frequency_data);\n my $table = $self->new_table([], [], { data_table => 1 });\n\n if (scalar(@$table_rows) != 0) {\n $self->add_table_columns($table);\n $table->add_rows(@$table_rows);\n\n my %display_pop;\n my %pops = map { $_->population->name => $_->population } @$frequency_data;\n foreach my $pop_name (keys(%pops)) {\n\n my $display_name = $pops{$pop_name}->display_group_name;\n $display_pop{$display_name} = 1 if ($display_name);\n }\n \n if (scalar(keys(%display_pop)) == 1) {\n my $pop_display = (keys(%display_pop))[0];\n $html .= qq{<h3>$pop_display allele frequency</h3>};\n }\n else {\n $html .= qq{<h3>Allele frequency</h3>};\n }\n $html .= $table->render;\n }\n return $html;\n}\n\n\nsub add_table_columns {\n my ($self, $table) = @_;\n\n $table->add_columns(\n { key => 'pop_name', title => 'Population' , align => 'left', sort => 'none' },\n { key => 'pop_size', title => 'Size' , align => 'left', sort => 'none' },\n { key => 'a_freqs', title => 'Allele type: frequency (count)' , align => 'left', sort => 'none' },\n { key => 'freqs', title => 'Non-reference frequency (count)', align => 'left', sort => 'none' },\n );\n\n return $table;\n}\n\nsub table_data {\n my ($self, $data) = @_;\n\n my $hub = $self->hub;\n my $object = $self->object;\n \n my (@rows, %pops, %pop_data, $tree, $all);\n\n # Get population structure\n foreach my $svpf (@$data) {\n\n my $pop = $svpf->population;\n my $pname = $pop->name;\n my $pop_id = $pop->dbID;\n $pop_data{$pop_id} = $svpf;\n \n\n if ($pname =~ /(\\W+|_)ALL/) {\n $all = $pop_id;\n next;\n }\n\n my $hash = $self->extra_pop($pop,\"super\");\n my ($super) = keys %{$hash||{}};\n if ($super) {\n $tree->{$super}{'children'}{$pop_id} = $pname;\n $tree->{$super}{'name'} = $hash->{$super}{'Name'} if (!$tree->{$super}{'name'});\n }\n else {\n $tree->{$pop_id}{'name'} = $pname;\n } \n }\n\n my @ids;\n push @ids, $all if $all;\n my @super_order = sort {$tree->{$a}{'name'} cmp $tree->{$b}{'name'}} keys (%$tree);\n foreach my $super (@super_order) {\n next if ($all && $super == $all); # Skip the 3 layers structure, which leads to duplicated rows\n push @ids, 
$super;\n my $children = $tree->{$super}{'children'} || {};\n push @ids, sort {$children->{$a} cmp $children->{$b}} keys (%$children);\n }\n\n\n # Loop over the populations\n foreach my $pop_id (@ids) {\n\n my $svpf = $pop_data{$pop_id};\n\n next if (!$svpf);\n\n my ($row_class, $group_member);\n if ($svpf->name =~ /(\\W+|_)ALL/ && $tree->{$pop_id}{'children'}) {\n $row_class = 'supergroup';\n }\n elsif ($tree->{$pop_id}{'children'}) {\n $row_class = 'subgroup';\n }\n elsif (scalar keys %$tree > 1) {\n $group_member = 1;\n }\n\n my @pop_parts = split(':',$svpf->name);\n my $pop_name = (@pop_parts > 2) ? $pop_parts[$#pop_parts] : $pop_parts[0].':<b>'.$pop_parts[1].'</b>';\n my $pop_desc = $svpf->description;\n my $pop_size = $svpf->size;\n my $global_freq = sprintf(\"%.4f\",$svpf->frequency);\n my $global_allele_count = 0;\n my $freqs_by_SO_term = $svpf->frequencies_by_class_SO_term;\n my $class_freq = '';\n foreach my $SO_term (sort(keys(%$freqs_by_SO_term))) {\n my $colour = $object->get_class_colour($SO_term);\n my $freq = sprintf(\"%.4f\",$freqs_by_SO_term->{$SO_term});\n\n my $allele_count = 0;\n \n # Loop over Sample IDs\n foreach my $sample_id (keys(%{$svpf->{samples_class}{$SO_term}})) {\n $allele_count += ($svpf->{samples_class}{$SO_term}{$sample_id} eq 'homozygous') ? 2 : 1;\n }\n $class_freq .= sprintf('<p style=\"margin-bottom:0px\"><span class=\"structural-variation-allele\" style=\"background-color:%s\"></span>'.\n '<span style=\"margin-bottom:2px\">%s</span>: '.\n '<span style=\"font-weight:bold\">%s (%i)</span></p>',\n $colour, $VARIATION_CLASSES{$SO_term}{'display_term'}, $freq, $allele_count);\n $global_allele_count += $allele_count;\n }\n\n if ($pop_desc) {\n $pop_desc = $self->strip_HTML($pop_desc);\n $pop_name = qq{<span class=\"_ht ht\" title=\"$pop_desc\">$pop_name</span>};\n }\n\n my $row = {\n pop_name => $group_member ? '&nbsp;&nbsp;'.$pop_name : $pop_name,\n pop_size => $pop_size,\n a_freqs => $class_freq,\n freqs => \"$global_freq ($global_allele_count)\"\n };\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C#_> public abstract class DocumentEventContent : IBoxEventContent\n {\n public DocumentId DocumentId { get; set; }\n public string DocumentCirculationId { get; set; }\n }\n}</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>A,B = map(int, input().split())\nif B % A == 0:\n print(A + B)\nelse:\n print(B - A)</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_SML_>val anyChar = P.anyToken\nfun string s = let val n = String.size s\n fun go i = if i >= n then\n P.pure s\n else\n char (String.sub (s, i)) >> go (i + 1)\n in go 0\n end\nend\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C_>/*\n * Copyright (c) 2010 Your File System Inc. All rights reserved.\n *\n * Redistribution and use in source and binary forms, with or without\n * modification, are permitted provided that the following conditions\n * are met:\n * 1. Redistributions of source code must retain the above copyright\n * notice, this list of conditions and the following disclaimer.\n * 2. Redistributions in binary form must reproduce the above copyright\n * notice, this list of conditions and the following disclaimer in the\n * documentation and/or other materials provided with the distribution.\n *\n * THIS SOFTWARE IS PROVIDED BY THE AUTHOR `AS IS'' AND ANY EXPRESS OR\n * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\n * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.\n * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,\n * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT\n * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF\n * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n */\n\n#include <afsconfig.h>\n#include <afs/param.h>\n\n#include <roken.h>\n\n#include <opr/queue.h>\n\n/* Need rx/rx.h to get working assert(), used by LOCK_GLOBAL_MUTEX */\n#include <rx/rx.h>\n#include <rx/rx_atomic.h>\n#ifdef AFS_RXGK_ENV\n#include <rx/rxgk.h>\n#endif\n\n#include <afs/opr.h>\n#include <afs/stds.h>\n#include <afs/pthread_glock.h>\n#include <afs/afsutil.h>\n\n#include \"cellconfig.h\"\n#include \"keys.h\"\n#include \"internal.h\"\n\nstruct afsconf_typedKey {\n rx_atomic_t refcnt;\n afsconf_keyType type;\n int kvno;\n int subType;\n struct rx_opaque key;\n};\n\nstatic struct afsconf_typedKey *afsconf_typedKey_blank(void);\n\n/* Memory storage for keyfile contents. 
*/\n\nstruct keyTypeList {\n struct opr_queue link;\n afsconf_keyType type;\n struct opr_queue kvnoList;\n};\n\nstruct kvnoList {\n struct opr_queue link;\n int kvno;\n struct opr_queue subTypeList;\n};\n\nstruct subTypeList {\n struct opr_queue link;\n int subType;\n struct afsconf_typedKey *key;\n};\n\nstatic int\nlistToArray(struct kvnoList *kvnoEntry, struct afsconf_typedKeyList **keys)\n{\n struct afsconf_typedKeyList *retval;\n struct opr_queue *cursor;\n int i;\n\n /* Allocate space for the keys we've got stored */\n retval = malloc(sizeof(struct afsconf_typedKeyList));\n retval->nkeys = opr_queue_Count(&kvnoEntry->subTypeList);\n\n if (retval->nkeys > 0) {\n retval->keys = calloc(retval->nkeys, sizeof(struct afsconf_typedKey *));\n\n\ti = 0;\n for(opr_queue_Scan(&kvnoEntry->subTypeList, cursor)) {\n\t struct subTypeList *entry;\n\n\t entry = opr_queue_Entry(cursor, struct subTypeList, link);\n\t retval->keys[i] = afsconf_typedKey_get(entry->key);\n\t i++;\n }\n } else {\n\tretval->keys = NULL;\n }\n\n *keys = retval;\n return 0;\n}\n\nstatic struct keyTypeList *\nfindByType(struct afsconf_dir *dir, afsconf_keyType type)\n{\n struct opr_queue *cursor;\n struct keyTypeList *entry = NULL;\n\n for (opr_queue_Scan(&dir->keyList, cursor)) {\n\tentry = opr_queue_Entry(cursor, struct keyTypeList, link);\n\tif (entry->type >= type)\n\t break;\n }\n if (entry == NULL || entry->type != type)\n\treturn NULL;\n\n return entry;\n}\n\nstatic struct kvnoList *\nfindInTypeList(struct keyTypeList *parent, int kvno)\n{\n struct opr_queue *cursor;\n struct kvnoList *entry = NULL;\n\n for (opr_queue_Scan(&parent->kvnoList, cursor)) {\n\tentry = opr_queue_Entry(cursor, struct kvnoList, link);\n\tif (entry->kvno >= kvno)\n\t break;\n }\n if (entry == NULL || entry->kvno != kvno)\n\treturn NULL;\n\n return entry;\n}\n\nstatic struct kvnoList *\nfindByKvno(struct afsconf_dir *dir, afsconf_keyType type, int kvno)\n{\n struct keyTypeList *entry;\n entry = findByType(dir, type);\n\n if (entry == NULL)\n\treturn NULL;\n\n return findInTypeList(entry, kvno);\n}\n\nstatic struct subTypeList *\nfindInKvnoList(struct kvnoList *parent, int subType)\n{\n struct opr_queue *cursor;\n struct subTypeList *entry = NULL;\n\n for (opr_queue_Scan(&parent->subTypeList, cursor)) {\n\tentry = opr_queue_Entry(cursor, struct subTypeList, link);\n\tif (entry->subType >= subType)\n\t break;\n }\n if (entry == NULL || entry->subType != subType)\n\treturn NULL;\n\n return entry;\n}\n\nstatic struct subTypeList *\nfindBySubType(struct afsconf_dir *dir, afsconf_keyType type, int kvno,\n\t int subType)\n{\n struct kvnoList *entry;\n\n entry = findByKvno(dir, type, kvno);\n if (entry == NULL)\n\treturn NULL;\n\n return findInKvnoList(entry, subType);\n}\n\n\n/* Add key. 
*/\nstatic int\naddMemoryKey(struct afsconf_dir *dir, struct afsconf_typedKey *key,\n\t int overwrite)\n{\n struct opr_queue *cursor;\n struct keyTypeList *typeEntry = NULL;\n struct kvnoList *kvnoEntry = NULL;\n struct subTypeList *subType = NULL;\n\n /* Find the place in the keyType list to insert the key into */\n for (opr_queue_Scan(&dir->keyList, cursor)) {\n\ttypeEntry = opr_queue_Entry(cursor, struct keyTypeList, link);\n\tif (typeEntry->type >= key->type)\n\t break;\n }\n\n if (typeEntry == NULL || typeEntry->type != key->type) {\n\tstruct keyTypeList *list;\n\n\tlist = malloc(sizeof(struct keyTypeList));\n\topr_queue_Init(&list->kvnoList);\n\tlist->type = key->type;\n\topr_queue_InsertBefore(cursor, &list->link);\n\ttypeEntry = list;\n }\n\n /* And the place in the kvno list */\n for (opr_queue_Scan(&typeEntry->kvnoList, cursor)) {\n\tkvnoEntry = opr_queue_Entry(cursor, struct kvnoList, link);\n\tif (kvnoEntry->kvno >= key->kvno)\n\t break;\n }\n\n if (kvnoEntry == NULL || kvnoEntry->kvno != key->kvno) {</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_>from setuptools import setup, find_packages\n\nsetup(\n name=\"home-assistant-frontend\",\n version=\"20210302.0\",\n description=\"The Home Assistant frontend\",\n url=\"https://github.com/home-assistant/home-assistant-polymer\",\n author=\"The Home Assistant Authors\",\n author_email=\"hello@home-assistant.io\",\n license=\"Apache License 2.0\",\n packages=find_packages(include=[\"hass_frontend\", \"hass_frontend.*\"]),\n include_package_data=True,\n zip_safe=False,\n)\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Scala_>package com.github.j5ik2o.ak.kcl.dyanmodb.streams\n\nimport akka.stream.scaladsl.Source\nimport akka.stream.stage.AsyncCallback\nimport com.amazonaws.services.cloudwatch.{ AmazonCloudWatch, AmazonCloudWatchClient }\nimport com.amazonaws.services.dynamodbv2.{ AmazonDynamoDB, AmazonDynamoDBClient }\nimport com.amazonaws.services.dynamodbv2.streamsadapter.{ AmazonDynamoDBStreamsAdapterClient, StreamsWorkerFactory }\nimport com.amazonaws.services.kinesis.clientlibrary.interfaces.v2.IRecordProcessorFactory\nimport com.amazonaws.services.kinesis.clientlibrary.lib.worker.{ KinesisClientLibConfiguration, Worker }\nimport com.amazonaws.services.kinesis.clientlibrary.types.{ InitializationInput, ShutdownInput }\nimport com.amazonaws.services.kinesis.metrics.interfaces.IMetricsFactory\nimport com.amazonaws.services.kinesis.model.Record\nimport com.github.j5ik2o.ak.kcl.dsl.KCLSource\nimport com.github.j5ik2o.ak.kcl.stage.KCLSourceStage.RecordSet</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_SQL_>-- Step 1: Find John's friends\r\nUSE socialnetwork\r\nGO\r\nSELECT Person2.name AS FriendName\r\nFROM Person Person1, friendOf, Person Person2\r\nWHERE MATCH(Person1-(friendOf)->Person2)\r\nAND Person1.name = 'John';\r\nGO\r\n\r\n-- Step 2: Find the friends of John's friends\r\nSELECT person1.name +' is friends with ' + person2.name, + 'who is friends with '+ person3.name\r\nFROM Person person1, friendOf friend1, Person person2, friendOf friend2, Person person3\r\nWHERE MATCH(person1-(friend1)->person2-(friend2)->person3)\r\nAND person1.name = 'John';\r\nGO</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Kotlin_>package com.lucasurbas.masterdetail.data\n\n/**\n * Created on 23/10/2018.\n */</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C++_> for (int i = 1; i <= n; ++i) {\n if (s[i - 1] == 'a') {\n dp[0][i] = dp[0][i - 1];\n dp[1][i] = dp[1][i - 1] + 1;\n } else {\n dp[0][i] = dp[0][i - 1] + 1;\n dp[1][i] = min(dp[0][i - 1], dp[1][i - 1]);\n }\n }\n return min(dp[1][n], dp[0][n]);\n }\n};\n\nint main() {\n Solution s;\n cout << s.minimumDeletions(\"aababbab\") << endl;\n}</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Bash_>#!/bin/bash\r\n\r\ncurl -sS https://getcomposer.org/installer | php -d suhosin.executor.include.whitelist=phar\r\n\r\nphp -d suhosin.executor.include.whitelist=phar ./composer.phar install\r\n\r\nrake --trace\r\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C#_> {\n }\n}</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Lisp_>(in-package #:net.mfiano.lisp.stripe)\n\n(define-object balance ()\n available\n pending)\n\n(define-object balance-funds ()\n amount\n currency\n bank-account\n card)\n\n(defmethod initialize-instance :after ((instance balance-funds) &key data\n &allow-other-keys)\n (destructuring-bind (&key source-types &allow-other-keys) data\n (destructuring-bind (&key bank-account card) source-types\n (reinitialize-instance\n instance\n :bank-account (or bank-account 0)\n :card (or card 0)))))\n\n(defmethod initialize-instance :after ((instance balance) &key data\n &allow-other-keys)\n (destructuring-bind (&key available pending &allow-other-keys) data\n (reinitialize-instance\n instance\n :available (mapcar\n (lambda (x)\n (make-instance 'balance-funds :data x))\n available)\n :pending (mapcar\n (lambda (x)\n (make-instance 'balance-funds :data x))\n pending))))\n\n(define-query retrieve-balance (:type balance)\n (:get \"balance\"))\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_Python_># coding: utf-8\n#\n# Copyright 2022 :Barry-Thomas-Paul: Moss\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\")\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http: // www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n# Service Class\n# this is a auto generated file generated by Cheetah\n# Libre Office Version: 7.3\n# Namespace: com.sun.star.chart2.data\n\nfrom ....lo.chart2.data.tabular_data_provider_arguments import TabularDataProviderArguments as TabularDataProviderArguments\n\n__all__ = ['TabularDataProviderArguments']\n\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>s = list(input())\nt = list(input())\n\n\ns.sort()\nt.sort(reverse=True)\n\na = []\na.append(s)\na.append(t)\n\nif a == sorted(a) and s != t:\n print(\"Yes\")\nelse:\n print(\"No\")\n \n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_TypeScript_>/*---------------------------------------------------------------------------------------------\n * Copyright (c) Microsoft Corporation. All rights reserved.\n * Licensed under the MIT License. See License.txt in the project root for license information.\n *--------------------------------------------------------------------------------------------*/\n\nimport { IWorkbenchConstructionOptions, create, ICredentialsProvider, IURLCallbackProvider, IWorkspaceProvider, IWorkspace, IWindowIndicator, IHomeIndicator, IProductQualityChangeHandler, ISettingsSyncOptions } from 'vs/workbench/workbench.web.api';\nimport { URI, UriComponents } from 'vs/base/common/uri';\nimport { Event, Emitter } from 'vs/base/common/event';\nimport { generateUuid } from 'vs/base/common/uuid';\nimport { CancellationToken } from 'vs/base/common/cancellation';\nimport { streamToBuffer } from 'vs/base/common/buffer';\nimport { Disposable } from 'vs/base/common/lifecycle';\nimport { request } from 'vs/base/parts/request/browser/request';\nimport { isFolderToOpen, isWorkspaceToOpen } from 'vs/platform/windows/common/windows';\nimport { isEqual } from 'vs/base/common/resources';\nimport { isStandalone } from 'vs/base/browser/browser';\nimport { localize } from 'vs/nls';\nimport { Schemas } from 'vs/base/common/network';\nimport product from 'vs/platform/product/common/product';\nimport { parseLogLevel } from 'vs/platform/log/common/log';\nimport { getBrowserUrl, replaceBrowserUrl } from 'vs/gogs1s/util';</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_TypeScript_>import { NiceForm } from \"./deps.ts\";\nimport \"./form.ts\";\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_TypeScript_> expect(spy).toHaveBeenCalledWith('queue_overflow', 'transaction');\n }\n });\n\n it('passes in headers', async () => {\n transport = new Transports.FetchTransport(\n {\n dsn: testDsn,\n headers: {\n Accept: 'application/json',\n },\n },\n window.fetch,\n );\n const response = { status: 200, headers: new Headers() };\n\n window.fetch.mockImplementation(() => Promise.resolve(response));\n\n const res = await transport.sendEvent(eventPayload);\n\n expect((res as Response).status).toBe('success');\n expect(fetch).toHaveBeenCalledWith(storeUrl, {\n body: JSON.stringify(eventPayload),\n headers: {\n Accept: 'application/json',\n },\n method: 'POST',\n referrerPolicy: 'origin',\n });\n });\n\n it('passes in fetch parameters', async () => {\n transport = new Transports.FetchTransport(\n {\n dsn: testDsn,\n fetchParameters: {\n credentials: 'include',\n },\n },\n window.fetch,\n );\n const response = { status: 200, headers: new Headers() };\n\n window.fetch.mockImplementation(() => Promise.resolve(response));\n\n const res = await transport.sendEvent(eventPayload);\n\n expect((res as Response).status).toBe('success');\n expect(fetch).toHaveBeenCalledWith(storeUrl, {\n body: JSON.stringify(eventPayload),\n credentials: 'include',\n method: 'POST',\n referrerPolicy: 'origin',\n });\n });\n\n describe('Rate-limiting', () => {\n it('back-off using Retry-After header', async () => {\n const retryAfterSeconds = 10;\n const beforeLimit = Date.now();\n const withinLimit = beforeLimit + (retryAfterSeconds / 2) * 1000;\n const afterLimit = beforeLimit + retryAfterSeconds * 1000;\n\n jest\n .spyOn(Date, 'now')\n // 1st event - updateRateLimits - false\n .mockImplementationOnce(() => beforeLimit)\n // 1st event - _handleRateLimit\n .mockImplementationOnce(() => beforeLimit)\n // 1st event - _handleRateLimit\n .mockImplementationOnce(() => beforeLimit)\n // 2nd event - _isRateLimited - true\n .mockImplementationOnce(() => withinLimit)\n // 3rd event - _isRateLimited - false\n .mockImplementationOnce(() => afterLimit)\n // 3rd event - _handleRateLimit\n .mockImplementationOnce(() => afterLimit);\n\n const headers = new Headers();\n headers.set('Retry-After', `${retryAfterSeconds}`);\n window.fetch.mockImplementation(() => Promise.resolve({ status: 429, headers }));\n\n try {\n await transport.sendEvent(eventPayload);\n throw new Error('unreachable!');\n } catch (res) {\n expect((res as Response).status).toBe(429);\n expect((res as Response).reason).toBeUndefined();\n expect(fetch).toHaveBeenCalled();\n }\n\n try {\n await transport.sendEvent(eventPayload);\n throw new Error('unreachable!');\n } catch (res) {\n expect((res as Response).status).toBe(429);\n expect((res as Response).reason).toBe(\n `Transport for event requests locked till ${new Date(afterLimit)} due to too many requests.`,\n );\n expect(fetch).toHaveBeenCalled();\n }\n\n window.fetch.mockImplementation(() => Promise.resolve({ status: 200, headers: new Headers() }));\n\n const eventRes = await transport.sendEvent(eventPayload);\n expect(eventRes.status).toBe('success');\n expect(fetch).toHaveBeenCalledTimes(2);\n });\n\n it('back-off using X-Sentry-Rate-Limits with single category', async () => {\n const retryAfterSeconds = 10;\n const beforeLimit = Date.now();\n const withinLimit = beforeLimit + (retryAfterSeconds / 2) * 1000;\n const afterLimit = beforeLimit + retryAfterSeconds * 1000;\n\n jest\n .spyOn(Date, 'now')\n // 1st event - _isRateLimited - false\n 
.mockImplementationOnce(() => beforeLimit)\n // 1st event - _handleRateLimit\n .mockImplementationOnce(() => beforeLimit)\n // 1st event - _isRateLimited\n .mockImplementationOnce(() => beforeLimit)\n // 1st event - _handleRateLimit\n .mockImplementationOnce(() => beforeLimit)\n // 2nd event - _isRateLimited - false (different category)\n .mockImplementationOnce(() => withinLimit)\n // 2nd event - _handleRateLimit\n .mockImplementationOnce(() => withinLimit)\n // 3rd event - _isRateLimited - true\n .mockImplementationOnce(() => withinLimit)\n // 4th event - _isRateLimited - false\n .mockImplementationOnce(() => afterLimit)\n // 4th event - _handleRateLimit\n .mockImplementationOnce(() => afterLimit);\n\n const headers = new Headers();\n headers.set('X-Sentry-Rate-Limits', `${retryAfterSeconds}:error:scope`);\n window.fetch.mockImplementation(() => Promise.resolve({ status: 429, headers }));\n\n try {\n await transport.sendEvent(eventPayload);\n throw new Error('unreachable!');\n } catch (res) {\n expect((res as Response).status).toBe(429);\n expect((res as Response).reason).toBeUndefined();\n expect(fetch).toHaveBeenCalled();\n }\n\n window.fetch.mockImplementation(() => Promise.resolve({ status: 200, headers: new Headers() }));\n\n const transactionRes = await transport.sendEvent(transactionPayload);\n expect(transactionRes.status).toBe('success');\n expect(fetch).toHaveBeenCalledTimes(2);\n\n try {\n await transport.sendEvent(eventPayload);\n throw new Error('unreachable!');\n } catch (res) {\n expect((res as Response).status).toBe(429);\n expect((res as Response).reason).toBe(\n `Transport for event requests locked till ${new Date(afterLimit)} due to too many requests.`,\n );\n expect(fetch).toHaveBeenCalledTimes(2);\n }\n\n const eventRes = await transport.sendEvent(eventPayload);\n expect(eventRes.status).toBe('success');\n expect(fetch).toHaveBeenCalledTimes(3);\n });\n\n it('back-off using X-Sentry-Rate-Limits with multiple categories', async () => {\n const retryAfterSeconds = 10;\n const beforeLimit = Date.now();\n const withinLimit = beforeLimit + (retryAfterSeconds / 2) * 1000;\n const afterLimit = beforeLimit + retryAfterSeconds * 1000;\n\n jest\n .spyOn(Date, 'now')\n // 1st event - _isRateLimited - false\n .mockImplementationOnce(() => beforeLimit)\n // 1st event - updateRateLimits\n .mockImplementationOnce(() => beforeLimit)\n // 1st event - _isRateLimited\n .mockImplementationOnce(() => beforeLimit)\n // 2nd event - _isRateLimited - true (event category)\n .mockImplementationOnce(() => withinLimit)\n // 3rd event - _isRateLimited - true (transaction category)\n .mockImplementationOnce(() => withinLimit)\n // 4th event - _isRateLimited - false (event category)\n .mockImplementationOnce(() => afterLimit)\n // 4th event - _handleRateLimit\n .mockImplementationOnce(() => afterLimit)\n // 5th event - _isRateLimited - false (transaction category)\n .mockImplementationOnce(() => afterLimit)\n // 5th event - _handleRateLimit\n .mockImplementationOnce(() => afterLimit);\n\n const headers = new Headers();\n headers.set('X-Sentry-Rate-Limits', `${retryAfterSeconds}:error;transaction:scope`);\n window.fetch.mockImplementation(() => Promise.resolve({ status: 429, headers }));\n\n try {\n await transport.sendEvent(eventPayload);\n throw new Error('unreachable!');\n } catch (res) {\n expect((res as Response).status).toBe(429);\n expect((res as Response).reason).toBeUndefined();</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Kotlin_>import de.undercouch.gradle.tasks.download.Download\nimport org.gradle.crypto.checksum.Checksum\nimport org.jetbrains.kotlin.gradle.tasks.KotlinCompile\nimport org.jetbrains.kotlin.gradle.tasks.KotlinTest\n\nplugins {\n kotlin(\"multiplatform\") version \"1.3.72\"\n `cpp-library`\n `maven-publish`\n id(\"org.gradle.crypto.checksum\") version \"1.1.0\"\n id(\"de.undercouch.download\") version \"4.1.1\"\n}\n\nval coroutinesVersion = \"1.4.1\"\n\nbuildscript {\n dependencies {\n classpath(\"org.kohsuke:github-api:1.116\")\n }\n repositories {\n mavenCentral()\n }\n}\n\nval skiko = SkikoProperties(rootProject)\nval debug = false\nval buildType = if (debug) SkiaBuildType.DEBUG else SkiaBuildType.RELEASE\n\nallprojects {\n group = \"org.jetbrains.skiko\"\n version = skiko.deployVersion\n}\n\nrepositories {\n mavenCentral()\n maven {\n url = uri(\"https://dl.bintray.com/kotlin/kotlin-eap\")\n }\n}\n\nval skiaZip = run {\n val zipName = skiko.skiaReleaseForTargetOS + \".zip\"\n val zipFile = skiko.dependenciesDir.resolve(\"skia/${zipName.substringAfterLast('/')}\")\n\n tasks.register(\"downloadSkia\", Download::class) {\n onlyIf { skiko.skiaDir == null && !zipFile.exists() }\n inputs.property(\"skia.release.for.target.os\", skiko.skiaReleaseForTargetOS)\n src(\"https://github.com/JetBrains/skia-build/releases/download/$zipName\")\n dest(zipFile)\n onlyIfModified(true)\n }.map { zipFile }\n}\n\nfun String.insertAfterFirst(substring: String, stringToInsert: String): String =\n let { orig ->\n buildString {\n var i = orig.indexOf(substring)\n if (i < 0) return orig\n\n i += substring.length\n\n append(orig.substring(0, i))\n append(stringToInsert)\n append(orig.substring(i))\n }\n }\n\nfun AbstractCopyTask.configureSkiaCopy(targetDir: File) {\n into(targetDir)\n}\n\nval skiaDir = run {\n if (skiko.skiaDir != null) {\n tasks.register(\"skiaDir\", DefaultTask::class) {\n // dummy task to simplify usage of the resulting provider (see `else` branch)\n // if a file provider is not created from a task provider,\n // then it cannot be used instead of a task in `dependsOn` clauses of other tasks.\n // e.g. the resulting `skiaDir` could not be used in `dependsOn` of CppCompile configuration\n enabled = false\n }.map { skiko.skiaDir!! }\n } else {\n val targetDir = skiko.dependenciesDir.resolve(\"skia/skia\")\n tasks.register(\"unzipSkia\", Copy::class) {\n from(skiaZip.map { zipTree(it) })\n configureSkiaCopy(targetDir)\n }.map { targetDir }\n }\n}\n\nval skijaZip = run {\n val zipFile = skiko.dependenciesDir.resolve(\"skija/${skiko.skijaCommitHash}.zip\")\n\n tasks.register(\"downloadSkija\", Download::class) {\n onlyIf { skiko.skijaDir == null && !zipFile.exists() }\n inputs.property(\"skija.commit.hash\", skiko.skijaCommitHash)\n src(\"https://github.com/JetBrains/skija/archive/${skiko.skijaCommitHash}.zip\")\n dest(zipFile)\n onlyIfModified(true)\n }.map { zipFile }\n}\n\nval skijaDir = run {\n if (skiko.skijaDir != null) {\n tasks.register(\"skijaDir\", DefaultTask::class) {\n enabled = false\n }.map { skiko.skijaDir!! 
}\n } else {\n val skijaDest = skiko.dependenciesDir.resolve(\"skija/skija\").apply { mkdirs() }\n tasks.register(\"unzipSkija\", Copy::class) {\n from(skijaZip.map { zipTree(it) }) {\n include(\"skija-${skiko.skijaCommitHash}/**\")\n eachFile {\n // drop skija-<COMMIT> subdir\n relativePath = RelativePath(true, *relativePath.segments.drop(1).toTypedArray())\n }\n includeEmptyDirs = false\n }\n into(skijaDest)\n }.map { skijaDest }\n }\n}\n\nval lombok by configurations.creating\nval jetbrainsAnnotations by configurations.creating\ndependencies {\n lombok(\"org.projectlombok:lombok:1.18.16\")\n jetbrainsAnnotations(\"org.jetbrains:annotations:19.0.0\")\n}\nval skijaSrcDir = run {\n val delombokSkijaSrcDir = project.file(\"src/jvmMain/java\")\n tasks.register(\"delombokSkija\", JavaExec::class) {\n classpath = lombok + jetbrainsAnnotations\n main = \"lombok.launch.Main\"\n args(\"delombok\", skijaDir.get().resolve(\"shared/src/main/java\"), \"-d\", delombokSkijaSrcDir)\n inputs.dir(skijaDir)\n outputs.dir(delombokSkijaSrcDir)\n\n doFirst {\n delombokSkijaSrcDir.deleteRecursively()\n delombokSkijaSrcDir.mkdirs()\n }\n doLast {\n // Remove Library.java from Skija.\n file(delombokSkijaSrcDir.path + \"/org/jetbrains/skija/impl/Library.java\").delete()\n }\n }.map { delombokSkijaSrcDir }\n}\n\nkotlin {\n jvm {\n compilations.all {\n kotlinOptions.jvmTarget = \"11\"\n }\n withJava()\n }\n\n sourceSets {\n val commonMain by getting {\n dependencies {\n implementation(kotlin(\"stdlib-common\"))\n }\n }\n val commonTest by getting {\n dependencies {\n implementation(kotlin(\"test-common\"))\n implementation(kotlin(\"test-annotations-common\"))\n }\n }\n val jvmMain by getting {\n kotlin.srcDirs(skijaSrcDir)\n dependencies {\n implementation(kotlin(\"stdlib-jdk8\"))\n implementation(\"org.jetbrains.kotlinx:kotlinx-coroutines-swing:$coroutinesVersion\")\n compileOnly(lombok)\n compileOnly(jetbrainsAnnotations)\n }\n\n }\n val jvmTest by getting {\n dependencies {\n implementation(\"org.jetbrains.kotlinx:kotlinx-coroutines-test:$coroutinesVersion\")\n implementation(kotlin(\"test-junit\"))\n }\n }\n }\n}\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_PHP_><?php\n\n/**\n * This file is part of the GordyAnsell GreenFedora PHP framework.\n *\n * (c) Gordon Ansell <contact@gordonansell.com>\n *\n * For the full copyright and license information, please view the LICENSE\n * file that was distributed with this source code.\n */\n\ndeclare(strict_types=1);\nnamespace GreenFedora\\Inflector;\n\n/**\n * Inflector interface.\n */\t\ninterface InflectorInterface\n{\t \t\n\t/**\n\t * Slugify.\n\t *\n\t * @param \tstring\t\t$data \t\tString to slugify.\n\t *\n\t * @return \tstring\n\t */\n\tpublic function slugify(string $data) : string;\n\t\t\n /**\n * Title case.\n *\n * @param string $data Data to filter.\n *\n * @return string\n */\n public function titleCase(string $data) : string;\n\n\t/**\n\t * Sha.\n\t *\n\t * @param \tstring\t\t$data \t\tString to enode.\n\t *\n\t * @return \tstring\n\t */\n\tpublic function sha(string $data) : string;\n\n\t/**\n\t * Strip script tags.\n\t *\n\t * @param \tstring\t\t$data \t\tString to strip.\n\t *\n\t * @return \tstring\n\t */\n\tpublic function stripScriptTags(string $data) : string;\n\n\t/**\n\t * Strip all tags.\n\t *\n\t * @param \tstring\t\t$data \t\tString to strip.\n\t *\n\t * @return \tstring\n\t */\n\tpublic function stripAllTags(string $data) : string;\n\n\t/**\n\t * Strip tags and their content.\n\t *\n\t * @param \tstring\t\t$data \t\tString to strip.\n\t * @param \tstring\t\t$tags \t\tTags to include.\n\t * @param \tbool\t\t$invert \tInvert processing.\t\n\t *\n\t * @return \tstring\n\t */\n\tpublic function stripTagsContent(string $data, string $tags = '', bool $invert = false) : string;\n\n\t/**\n\t * Shorten a string.\n\t *\n\t * @param \tstring\t\t$data \t\tString to shorten.\n\t * @param \tint\t\t\t$words \t\tNumber of words.\n\t * @param \tstring\t\t$suff \t\tSuffix.\t\n\t *\n\t * @return \tstring\n\t */\n\tpublic function shortenStringWords(string $data, int $words = 100, string $suff = ' ...') : string;\n}\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_C_> * FFmpeg is distributed in the hope that it will be useful,\n * but WITHOUT ANY WARRANTY; without even the implied warranty of\n * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n * Lesser General Public License for more details.\n *\n * You should have received a copy of the GNU Lesser General Public\n * License along with FFmpeg; if not, write to the Free Software\n * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA\n */\n\n#include <stdint.h>\n\n#include \"config.h\"\n#include \"libavutil/attributes.h\"\n#include \"utvideodsp.h\"\n\nstatic void restore_rgb_planes_c(uint8_t *src_r,\n uint8_t *src_g,\n uint8_t *src_b,\n ptrdiff_t linesize_r,\n ptrdiff_t linesize_g,\n ptrdiff_t linesize_b,\n int width, int height)\n{\n uint8_t r, g, b;\n int i, j;\n\n for (j = 0; j < height; j++) {\n for (i = 0; i < width; i++) {\n r = src_r[i];\n g = src_g[i];\n b = src_b[i];\n src_r[i] = r + g - 0x80;\n src_b[i] = b + g - 0x80;\n }\n src_r += linesize_r;\n src_g += linesize_g;\n src_b += linesize_b;\n }\n}\n\nstatic void restore_rgb_planes10_c(uint16_t *src_r,\n uint16_t *src_g,\n uint16_t *src_b,\n ptrdiff_t linesize_r,\n ptrdiff_t linesize_g,\n ptrdiff_t linesize_b,\n int width, int height)\n{\n int r, g, b;\n int i, j;\n\n for (j = 0; j < height; j++) {\n for (i = 0; i < width; i++) {\n r = src_r[i];\n g = src_g[i];\n b = src_b[i];\n src_r[i] = (r + g - 0x200) & 0x3FF;\n src_b[i] = (b + g - 0x200) & 0x3FF;\n }\n src_r += linesize_r;\n src_g += linesize_g;\n src_b += linesize_b;\n }\n}\n\nav_cold void ff_utvideodsp_init(UTVideoDSPContext *c)\n{\n c->restore_rgb_planes = restore_rgb_planes_c;\n c->restore_rgb_planes10 = restore_rgb_planes10_c;\n\n if (ARCH_X86)\n ff_utvideodsp_init_x86(c);\n}\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_># -*- coding: utf-8 -*-\n#\n# Flask-YAMLI18N documentation build configuration file, created by\n# sphinx-quickstart on Tue Oct 16 16:15:46 2012.\n#\n# This file is execfile()d with the current directory set to its containing dir.\n#\n# Note that not all possible configuration values are present in this\n# autogenerated file.\n#\n# All configuration values have a default; values that are commented out\n# serve to show the default.\n\nimport sys, os\nsys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))\nsys.path.append(os.path.abspath('_themes'))\n\n# If extensions (or modules to document with autodoc) are in another directory,\n# add these directories to sys.path here. If the directory is relative to the\n# documentation root, use os.path.abspath to make it absolute, like shown here.\n#sys.path.insert(0, os.path.abspath('.'))\n\n# -- General configuration -----------------------------------------------------\n\n# If your documentation needs a minimal Sphinx version, state it here.\n#needs_sphinx = '1.0'\n\n# Add any Sphinx extension module names here, as strings. They can be extensions\n# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.\nextensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo']\n# 'sphinx.ext.pngmath', 'sphinx.ext.viewcode']\n\n# Add any paths that contain templates here, relative to this directory.\ntemplates_path = ['_templates']\nhtml_theme_path = ['_themes']\n\n# The suffix of source filenames.\nsource_suffix = '.rst'\n\n# The encoding of source files.\n#source_encoding = 'utf-8-sig'\n\n# The master toctree document.\nmaster_doc = 'index'\n\n# General information about the project.\nproject = u'Flask YAMLI18N'\ncopyright = u'2012, Lix Xu'\n\n# The version info for the project you're documenting, acts as replacement for\n# |version| and |release|, also used in various other places throughout the\n# built documents.\n#\n# The short X.Y version.\nversion = '0.2'\n# The full version, including alpha/beta/rc tags.\nrelease = '0.2.0'\n\n# The language for content autogenerated by Sphinx. Refer to documentation\n# for a list of supported languages.\n#language = None\n\n# There are two options for replacing |today|: either, you set today to some\n# non-false value, then it is used:\n#today = ''\n# Else, today_fmt is used as the format for a strftime call.\n#today_fmt = '%B %d, %Y'\n\n# List of patterns, relative to source directory, that match files and\n# directories to ignore when looking for source files.\nexclude_patterns = ['_build']\n\n# The reST default role (used for this markup: `text`) to use for all documents.\n#default_role = None\n\n# If true, '()' will be appended to :func: etc. cross-reference text.\n#add_function_parentheses = True\n\n# If true, the current module name will be prepended to all description\n# unit titles (such as .. function::).\n#add_module_names = True\n\n# If true, sectionauthor and moduleauthor directives will be shown in the\n# output. They are ignored by default.\n#show_authors = False\n\n# The name of the Pygments (syntax highlighting) style to use.\npygments_style = 'flask_theme_support.FlaskyStyle'\n\n# A list of ignored prefixes for module index sorting.\n#modindex_common_prefix = []\n\n\n# -- Options for HTML output ---------------------------------------------------\n\n# The theme to use for HTML and HTML Help pages. 
See the documentation for\n# a list of builtin themes.\n# html_theme = 'nature'\nhtml_theme = 'flask_small'\n\nhtml_theme_options = {\n 'index_logo': 'flask-yamli18n.png',\n 'github_fork': 'lixxu/flask-yamli18n'\n}\n\n# Theme options are theme-specific and customize the look and feel of a theme\n# further. For a list of options available for each theme, see the\n# documentation.\n#html_theme_options = {}\n\n# Add any paths that contain custom themes here, relative to this directory.\n#html_theme_path = []\n\n# The name for this set of Sphinx documents. If None, it defaults to\n# \"<project> v<release> documentation\".\n#html_title = None\n\n# A shorter title for the navigation bar. Default is the same as html_title.\n#html_short_title = None\n\n# The name of an image file (relative to this directory) to place at the top\n# of the sidebar.\n#html_logo = None\n\n# The name of an image file (within the static path) to use as favicon of the\n# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32\n# pixels large.\n#html_favicon = None\n\n# Add any paths that contain custom static files (such as style sheets) here,\n# relative to this directory. They are copied after the builtin static files,\n# so a file named \"default.css\" will overwrite the builtin \"default.css\".\nhtml_static_path = ['_static']\n\n# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,\n# using the given strftime format.\n#html_last_updated_fmt = '%b %d, %Y'\n\n# If true, SmartyPants will be used to convert quotes and dashes to\n# typographically correct entities.\n#html_use_smartypants = True\n\n# Custom sidebar templates, maps document names to template names.\n#html_sidebars = {}\n\n# Additional templates that should be rendered to pages, maps page names to\n# template names.\n#html_additional_pages = {}\n\n# If false, no module index is generated.\n#html_domain_indices = True\n\n# If false, no index is generated.\n#html_use_index = True\n\n# If true, the index is split into individual pages for each letter.\n#html_split_index = False\n\n# If true, links to the reST sources are added to the pages.\n#html_show_sourcelink = True\n\n# If true, \"Created using Sphinx\" is shown in the HTML footer. Default is True.\n#html_show_sphinx = True\n\n# If true, \"(C) Copyright ...\" is shown in the HTML footer. Default is True.\n#html_show_copyright = True\n\n# If true, an OpenSearch description file will be output, and all pages will\n# contain a <link> tag referring to it. The value of this option must be the\n# base URL from which the finished HTML is served.\n#html_use_opensearch = ''\n\n# This is the file name suffix for HTML files (e.g. \".xhtml\").\n#html_file_suffix = None\n\n# Output file base name for HTML help builder.\nhtmlhelp_basename = 'FlaskYAMLI18Ndoc'\n\n\n# -- Options for LaTeX output --------------------------------------------------\n\nlatex_elements = {\n# The paper size ('letterpaper' or 'a4paper').\n#'papersize': 'letterpaper',\n\n# The font size ('10pt', '11pt' or '12pt').\n#'pointsize': '10pt',\n\n# Additional stuff for the LaTeX preamble.\n#'preamble': '',\n}\n\n# Grouping the document tree into LaTeX files. 
List of tuples\n# (source start file, target name, title, author, documentclass [howto/manual]).\nlatex_documents = [\n ('index', 'FlaskYAMLI18N.tex', u'Flask YAMLI18N Documentation',\n u'Lix Xu', 'manual'),\n]\n\n# The name of an image file (relative to this directory) to place at the top of\n# the title page.</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Crystal_># THIS FILE WAS AUTO GENERATED FROM THE K8S SWAGGER SPEC\n\nrequire \"yaml\"\nrequire \"json\"\n\nmodule Pyrite\n # Binding ties one object to another; for example, a pod is bound to a node by a scheduler. Deprecated in 1.7, please use the bindings subresource of pods instead.\n class Api::Core::V1::Binding < Kubernetes::Object\n @api_version = \"v1\"\n @kind = \"Binding\"\n\n def self.new(pull : ::JSON::PullParser)\n previous_def(pull).tap do |instance|\n unless instance.api_version == \"v1\" && instance.kind == \"Binding\"\n raise ::JSON::ParseException.new(\"Couldn't parse #{self} from #{pull.read_raw}\", *pull.location)\n end\n end\n end\n\n def self.new(ctx : ::YAML::ParseContext, node : ::YAML::Nodes::Node)\n previous_def(ctx, node).tap do |instance|\n unless instance.api_version == \"v1\" && instance.kind == \"Binding\"\n raise ::YAML::ParseException.new(\"Couldn't parse #{self}\", *node.location)\n end\n end\n end\n\n # The target object that you want to bind to the standard object.\n @[::JSON::Field(key: \"target\")]\n @[::YAML::Field(key: \"target\")]\n property target : Api::Core::V1::ObjectReference\n\n def initialize(*, @metadata : Apimachinery::Apis::Meta::V1::ObjectMeta | Nil = nil, @target : Api::Core::V1::ObjectReference)\n end\n end\nend\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_YAML_>postsubmits:\n openshift-priv/console-operator:\n - agent: kubernetes\n branches:\n - ^release-4\\.1$\n decorate: true\n decoration_config:\n skip_cloning: true\n hidden: true\n labels:\n ci-operator.openshift.io/prowgen-controlled: \"true\"\n name: branch-ci-openshift-priv-console-operator-release-4.1-images\n path_alias: github.com/openshift/console-operator\n spec:\n containers:\n - args:\n - --artifact-dir=$(ARTIFACTS)\n - --give-pr-author-access-to-namespace=true\n - --image-import-pull-secret=/etc/pull-secret/.dockerconfigjson\n - --kubeconfig=/etc/apici/kubeconfig\n - --oauth-token-path=/usr/local/github-credentials/oauth\n - --promote\n - --sentry-dsn-path=/etc/sentry-dsn/ci-operator\n - --target=[images]\n command:\n - ci-operator\n image: ci-operator:latest\n imagePullPolicy: Always\n name: \"\"\n resources:\n requests:\n cpu: 10m\n volumeMounts:\n - mountPath: /etc/apici\n name: apici-ci-operator-credentials\n readOnly: true\n - mountPath: /usr/local/github-credentials\n name: github-credentials-openshift-ci-robot-private-git-cloner\n readOnly: true\n - mountPath: /etc/pull-secret\n name: pull-secret\n readOnly: true\n - mountPath: /etc/sentry-dsn\n name: sentry-dsn\n readOnly: true\n serviceAccountName: ci-operator\n volumes:\n - name: apici-ci-operator-credentials\n secret:\n items:\n - key: sa.ci-operator.apici.config\n path: kubeconfig\n secretName: apici-ci-operator-credentials\n - name: github-credentials-openshift-ci-robot-private-git-cloner\n secret:\n secretName: github-credentials-openshift-ci-robot-private-git-cloner\n - name: pull-secret\n secret:\n secretName: regcred\n - name: sentry-dsn\n secret:\n secretName: sentry-dsn\n</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_JavaScript_> 'node_modules/jquery-ui/themes/themes/theme.css',\n 'node_modules/ng-dialog/css/ngDialog.css',\n 'node_modules/malihu-custom-scrollbar-plugin/jquery.mCustomScrollbar.css',\n 'node_modules/angular-ui-layout/src/ui-layout.css',\n 'node_modules/angularjs-slider/dist/rzslider.css',\n 'node_modules/angular-cron-jobs/dist/angular-cron-jobs.css'\n ]\n },\n</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]
[ { "assistant": "<code><loc_0><loc_0><loc_500><loc_500><_Python_>hand = {\n 'a': input(),\n 'b': input(),\n 'c': input()\n}\nturn = 'a'\n\nwhile True:\n if not hand[turn]:\n break\n s = hand[turn]\n hand[turn] = s[1:]\n turn = s[0]\n\nprint(turn.upper())</code>", "source": "SynthCodeNetNoImageTag", "user": "Convert code to text." } ]
[ { "assistant": "<loc_0><loc_0><loc_500><loc_500><_C_> return (handle >= CONFIG_SPI_DEVICE_MAX || !g_spi_devices[handle].is_initialized);\n}\n\nstatic error_t spi_configure(spi_device_handle_t handle)\n{\n if (spi_invalid_handle(handle)) {\n log_error(ERROR_INVALID, \"Invalid SPI handle\");\n return ERROR_INVALID;\n }\n\n spi_device_configuration_t *spi_dev = &g_spi_devices[handle].config;\n g_current_spi_device = handle;\n\n uint32_t cr1_value = 0;\n uint32_t cr2_value = 0;\n\n cr1_value |=\n (spi_dev->clock_mode << SPI_CR1_CPHA_Pos) |\n (spi_dev->mode << SPI_CR1_MSTR_Pos) |\n (spi_dev->baud_rate_prescaler << SPI_CR1_BR_Pos) |\n (spi_dev->significant_bit << SPI_CR1_LSBFIRST_Pos);\n\n if (spi_dev->com_mode == SPI_COM_MODE_FULL_DUPLEX) {\n cr1_value &= ~(1 << SPI_CR1_RXONLY_Pos);\n\n } else if (spi_dev->com_mode == SPI_COM_MODE_HALF_DUPLEX_RECEIVE) {\n cr1_value |= 1 << SPI_CR1_BIDIMODE_Pos;\n cr1_value &= ~(1 << SPI_CR1_BIDIOE_Pos);\n\n } else if (spi_dev->com_mode == SPI_COM_MODE_HALF_DUPLEX_TRANSMIT) {\n cr1_value |= (1 << SPI_CR1_BIDIMODE_Pos) |\n (1 << SPI_CR1_BIDIOE_Pos);\n }\n\n cr1_value |= SPI_CR1_SSM;\n cr2_value |= SPI_CR2_SSOE;\n\n cr2_value |= spi_dev->data_size << SPI_CR2_DS_Pos;\n cr2_value |= SPI_CR2_FRXTH;\n\n spi_dev->spi->CR1 = cr1_value;\n spi_dev->spi->CR2 = cr2_value;\n\n spi_dev->spi->CR1 |= SPI_CR1_SPE;\n\n return SUCCESS;\n}\n\nstatic bool spi_tx_buffer_empty(SPI_TypeDef *spi)\n{\n return (bool)(spi->SR & 2);\n}\n\nstatic void spi_transmit_8bit(SPI_TypeDef *spi, uint8_t data)\n{\n while (!spi_tx_buffer_empty(spi)) {}\n *(uint8_t *)&spi->DR = data;\n}\n\nstatic void spi_receive_8bit(SPI_TypeDef *spi, uint8_t *data)\n{\n *data = spi->DR;\n}\n\nstatic error_t spi_find_free_interface(spi_interface_handle_t *handle)\n{\n if (!handle) {\n return ERROR_INVALID;\n }\n\n spi_interface_handle_t spi_handle;\n\n for (spi_handle = 0; spi_handle < CONFIG_SPI_INTERFACE_MAX; spi_handle++) {\n if (!g_spi_interfaces[spi_handle].is_initialized) {\n *handle = spi_handle;\n return SUCCESS;\n }\n }\n\n return ERROR_NO_MEMORY;\n}\n\nstatic error_t spi_find_free_device(spi_device_handle_t *handle)\n{\n if (!handle) {\n return ERROR_INVALID;\n }\n\n spi_device_handle_t spi_handle;\n\n for (spi_handle = 0; spi_handle < CONFIG_SPI_DEVICE_MAX; spi_handle++) {\n if (!g_spi_devices[spi_handle].is_initialized) {\n *handle = spi_handle;\n return SUCCESS;</code>", "source": "SynthCodeNetNoImageTag", "user": "<code>" } ]