{ // 获取包含Hugging Face文本的span元素 const spans = link.querySelectorAll('span.whitespace-nowrap, span.hidden.whitespace-nowrap'); spans.forEach(span => { if (span.textContent && span.textContent.trim().match(/Hugging\s*Face/i)) { span.textContent = 'AI快站'; } }); }); // 替换logo图片的alt属性 document.querySelectorAll('img[alt*="Hugging"], img[alt*="Face"]').forEach(img => { if (img.alt.match(/Hugging\s*Face/i)) { img.alt = 'AI快站 logo'; } }); } // 替换导航栏中的链接 function replaceNavigationLinks() { // 已替换标记,防止重复运行 if (window._navLinksReplaced) { return; } // 已经替换过的链接集合,防止重复替换 const replacedLinks = new Set(); // 只在导航栏区域查找和替换链接 const headerArea = document.querySelector('header') || document.querySelector('nav'); if (!headerArea) { return; } // 在导航区域内查找链接 const navLinks = headerArea.querySelectorAll('a'); navLinks.forEach(link => { // 如果已经替换过,跳过 if (replacedLinks.has(link)) return; const linkText = link.textContent.trim(); const linkHref = link.getAttribute('href') || ''; // 替换Spaces链接 - 仅替换一次 if ( (linkHref.includes('/spaces') || linkHref === '/spaces' || linkText === 'Spaces' || linkText.match(/^s*Spacess*$/i)) && linkText !== 'OCR模型免费转Markdown' && linkText !== 'OCR模型免费转Markdown' ) { link.textContent = 'OCR模型免费转Markdown'; link.href = 'https://fast360.xyz'; link.setAttribute('target', '_blank'); link.setAttribute('rel', 'noopener noreferrer'); replacedLinks.add(link); } // 删除Posts链接 else if ( (linkHref.includes('/posts') || linkHref === '/posts' || linkText === 'Posts' || linkText.match(/^s*Postss*$/i)) ) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } // 替换Docs链接 - 仅替换一次 else if ( (linkHref.includes('/docs') || linkHref === '/docs' || linkText === 'Docs' || linkText.match(/^s*Docss*$/i)) && linkText !== '模型下载攻略' ) { link.textContent = '模型下载攻略'; link.href = '/'; replacedLinks.add(link); } // 删除Enterprise链接 else if ( (linkHref.includes('/enterprise') || linkHref === '/enterprise' || linkText === 'Enterprise' || linkText.match(/^s*Enterprises*$/i)) 
) { if (link.parentNode) { link.parentNode.removeChild(link); } replacedLinks.add(link); } }); // 查找可能嵌套的Spaces和Posts文本 const textNodes = []; function findTextNodes(element) { if (element.nodeType === Node.TEXT_NODE) { const text = element.textContent.trim(); if (text === 'Spaces' || text === 'Posts' || text === 'Enterprise') { textNodes.push(element); } } else { for (const child of element.childNodes) { findTextNodes(child); } } } // 只在导航区域内查找文本节点 findTextNodes(headerArea); // 替换找到的文本节点 textNodes.forEach(node => { const text = node.textContent.trim(); if (text === 'Spaces') { node.textContent = node.textContent.replace(/Spaces/g, 'OCR模型免费转Markdown'); } else if (text === 'Posts') { // 删除Posts文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } else if (text === 'Enterprise') { // 删除Enterprise文本节点 if (node.parentNode) { node.parentNode.removeChild(node); } } }); // 标记已替换完成 window._navLinksReplaced = true; } // 替换代码区域中的域名 function replaceCodeDomains() { // 特别处理span.hljs-string和span.njs-string元素 document.querySelectorAll('span.hljs-string, span.njs-string, span[class*="hljs-string"], span[class*="njs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换hljs-string类的span中的域名(移除多余的转义符号) document.querySelectorAll('span.hljs-string, span[class*="hljs-string"]').forEach(span => { if (span.textContent && span.textContent.includes('huggingface.co')) { span.textContent = span.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 替换pre和code标签中包含git clone命令的域名 document.querySelectorAll('pre, code').forEach(element => { if (element.textContent && element.textContent.includes('git clone')) { const text = element.innerHTML; if (text.includes('huggingface.co')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 处理特定的命令行示例 document.querySelectorAll('pre, code').forEach(element => { const 
text = element.innerHTML; if (text.includes('huggingface.co')) { // 针对git clone命令的专门处理 if (text.includes('git clone') || text.includes('GIT_LFS_SKIP_SMUDGE=1')) { element.innerHTML = text.replace(/huggingface.co/g, 'aifasthub.com'); } } }); // 特别处理模型下载页面上的代码片段 document.querySelectorAll('.flex.border-t, .svelte_hydrator, .inline-block').forEach(container => { const content = container.innerHTML; if (content && content.includes('huggingface.co')) { container.innerHTML = content.replace(/huggingface.co/g, 'aifasthub.com'); } }); // 特别处理模型仓库克隆对话框中的代码片段 try { // 查找包含"Clone this model repository"标题的对话框 const cloneDialog = document.querySelector('.svelte_hydration_boundary, [data-target="MainHeader"]'); if (cloneDialog) { // 查找对话框中所有的代码片段和命令示例 const codeElements = cloneDialog.querySelectorAll('pre, code, span'); codeElements.forEach(element => { if (element.textContent && element.textContent.includes('huggingface.co')) { if (element.innerHTML.includes('huggingface.co')) { element.innerHTML = element.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { element.textContent = element.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); } // 更精确地定位克隆命令中的域名 document.querySelectorAll('[data-target]').forEach(container => { const codeBlocks = container.querySelectorAll('pre, code, span.hljs-string'); codeBlocks.forEach(block => { if (block.textContent && block.textContent.includes('huggingface.co')) { if (block.innerHTML.includes('huggingface.co')) { block.innerHTML = block.innerHTML.replace(/huggingface.co/g, 'aifasthub.com'); } else { block.textContent = block.textContent.replace(/huggingface.co/g, 'aifasthub.com'); } } }); }); } catch (e) { // 错误处理但不打印日志 } } // 当DOM加载完成后执行替换 if (document.readyState === 'loading') { document.addEventListener('DOMContentLoaded', () => { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); 
replaceNavigationLinks(); } }, 3000); }); } else { replaceHeaderBranding(); replaceNavigationLinks(); replaceCodeDomains(); // 只在必要时执行替换 - 3秒后再次检查 setTimeout(() => { if (!window._navLinksReplaced) { console.log('[Client] 3秒后重新检查导航链接'); replaceNavigationLinks(); } }, 3000); } // 增加一个MutationObserver来处理可能的动态元素加载 const observer = new MutationObserver(mutations => { // 检查是否导航区域有变化 const hasNavChanges = mutations.some(mutation => { // 检查是否存在header或nav元素变化 return Array.from(mutation.addedNodes).some(node => { if (node.nodeType === Node.ELEMENT_NODE) { // 检查是否是导航元素或其子元素 if (node.tagName === 'HEADER' || node.tagName === 'NAV' || node.querySelector('header, nav')) { return true; } // 检查是否在导航元素内部 let parent = node.parentElement; while (parent) { if (parent.tagName === 'HEADER' || parent.tagName === 'NAV') { return true; } parent = parent.parentElement; } } return false; }); }); // 只在导航区域有变化时执行替换 if (hasNavChanges) { // 重置替换状态,允许再次替换 window._navLinksReplaced = false; replaceHeaderBranding(); replaceNavigationLinks(); } }); // 开始观察document.body的变化,包括子节点 if (document.body) { observer.observe(document.body, { childList: true, subtree: true }); } else { document.addEventListener('DOMContentLoaded', () => { observer.observe(document.body, { childList: true, subtree: true }); }); } })(); \n\n''' % (\r\n header_html,\r\n)\r\nst.sidebar.markdown(\r\n header_full,\r\n unsafe_allow_html=True,\r\n)\r\n\r\n# Long Form QA with ELI5 and Wikipedia\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny\t\t\t\t\t\t\t ='''\nThis demo presents a model trained to [provide long-form answers to open-domain questions](https://yjernite.github.io/lfqa.html).\nFirst, a document retriever fetches a set of relevant Wikipedia passages given the question from the [Wiki40b](https://research.google/pubs/pub49029/) dataset,\na pre-processed fixed snapshot of Wikipedia.\n'''\r\nst.sidebar.markdown(description, unsafe_allow_html=True)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict\t\t\t\t\t\t\t =[\r\n '''Answer the 
question''',\r\n '''View the retrieved document only''',\r\n '''View the most similar ELI5 question and answer''',\r\n '''Show me everything, please!''',\r\n]\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any]\t\t\t\t\t\t\t =st.sidebar.checkbox('Demo options')\r\nif demo_options:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str]\t\t\t\t\t\t\t =st.sidebar.selectbox(\r\n\t\t '',\r\n\t\t action_list,\r\n\t\t index=3,\r\n\t\t)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny\t\t\t\t\t\t\t =action_list.index(action_st)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any]\t\t\t\t\t\t\t =st.sidebar.selectbox(\r\n\t\t '',\r\n\t\t ['Show full text of passages', 'Show passage section titles'],\r\n\t\t index=0,\r\n\t\t)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any]\t\t\t\t\t\t\t =show_type == '''Show full text of passages'''\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int]\t\t\t\t\t\t\t =3\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any]\t\t\t\t\t\t\t =True\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any]\t\t\t\t\t\t\t =st.sidebar.checkbox('Retrieval options')\r\nif retrieval_options:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any]\t\t\t\t\t\t\t ='''\n ### Information retriever options\n\n The **sparse** retriever uses ElasticSearch, while the **dense** retriever uses max-inner-product search between a question and passage embedding\n trained using the [ELI5](https://arxiv.org/abs/1907.09190) questions-answer pairs.\n The answer is then generated by sequence to sequence model which takes the question and retrieved document as input.\n '''\r\n\t\tst.sidebar.markdown(retriever_info)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict\t\t\t\t\t\t\t =st.sidebar.selectbox('Which Wikipedia format should the model use?', ['wiki40b', 'none'])\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any]\t\t\t\t\t\t\t =st.sidebar.selectbox('Which Wikipedia indexer should the model use?', ['dense', 'sparse', 
'mixed'])\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint\t\t\t\t\t\t\t ='''wiki40b'''\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict\t\t\t\t\t\t\t ='''dense'''\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny\t\t\t\t\t\t\t ='''beam'''\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr\t\t\t\t\t\t\t =2\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple\t\t\t\t\t\t\t =64\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any]\t\t\t\t\t\t\t =256\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int]\t\t\t\t\t\t\t =None\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple\t\t\t\t\t\t\t =None\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr\t\t\t\t\t\t\t =st.sidebar.checkbox('Generation options')\r\nif generate_options:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr\t\t\t\t\t\t\t ='''\n ### Answer generation options\n\n The sequence-to-sequence model was initialized with [BART](https://huggingface.co/facebook/bart-large)\n weights and fine-tuned on the ELI5 QA pairs and retrieved documents. You can use the model for greedy decoding with\n **beam** search, or **sample** from the decoder\\'s output probabilities.\n '''\r\n\t\tst.sidebar.markdown(generate_info)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict\t\t\t\t\t\t\t =st.sidebar.selectbox('Would you like to use beam search or sample an answer?', ['beam', 'sampled'])\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any]\t\t\t\t\t\t\t =st.sidebar.slider(\r\n\t\t 'Minimum generation length', min_value=8, max_value=256, value=64, step=8, format=None, key=None\r\n\t\t)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int]\t\t\t\t\t\t\t =st.sidebar.slider(\r\n\t\t 'Maximum generation length', min_value=64, max_value=512, value=256, step=16, format=None, key=None\r\n\t\t)\r\n\t\tif sampled == \"beam\":\r\n\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any]\t\t\t\t\t\t\t =st.sidebar.slider('Beam size', min_value=1, max_value=8, value=2, step=None, format=None, 
key=None)\r\n\t\telse:\r\n\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint\t\t\t\t\t\t\t =st.sidebar.slider(\r\n\t\t\t\t 'Nucleus sampling p', min_value=0.1, max_value=1.0, value=0.9_5, step=0.0_1, format=None, key=None\r\n\t\t\t\t)\r\n\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any]\t\t\t\t\t\t\t =st.sidebar.slider(\r\n\t\t\t\t 'Temperature', min_value=0.1, max_value=1.0, value=0.7, step=0.0_1, format=None, key=None\r\n\t\t\t\t)\r\n\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any]\t\t\t\t\t\t\t =None\r\n\r\n# start main text\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any]\t\t\t\t\t\t\t =[\r\n '''''',\r\n '''How do people make chocolate?''',\r\n '''Why do we get a fever when we are sick?''',\r\n '''How can different animals perceive different colors?''',\r\n '''What is natural language processing?''',\r\n '''What\\'s the best way to treat a sunburn?''',\r\n '''What exactly are vitamins ?''',\r\n '''How does nuclear energy provide electricity?''',\r\n '''What\\'s the difference between viruses and bacteria?''',\r\n '''Why are flutes classified as woodwinds when most of them are made out of metal ?''',\r\n '''Why do people like drinking coffee even though it tastes so bad?''',\r\n '''What happens when wine ages? How does it make the wine taste better?''',\r\n '''If an animal is an herbivore, where does it get the protein that it needs to survive if it only eats grass?''',\r\n '''How can we set a date to the beginning or end of an artistic period? Doesn\\'t the change happen gradually?''',\r\n '''How does New Zealand have so many large bird predators?''',\r\n]\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict\t\t\t\t\t\t\t =st.selectbox(\r\n 'What would you like to ask? 
---- select to enter a new query',\r\n questions_list,\r\n index=1,\r\n)\r\nif question_s == \"\":\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any]\t\t\t\t\t\t\t =st.text_input('Enter your question here:', '')\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple\t\t\t\t\t\t\t =question_s\r\n\r\nif st.button('Show me!'):\r\n\t\tif action in [0, 1, 3]:\r\n\t\t\t\tif index_type == \"mixed\":\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any]\t\t\t\t\t\t\t =make_support(question, source=wiki_source, method='dense', n_results=10)\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny\t\t\t\t\t\t\t =make_support(question, source=wiki_source, method='sparse', n_results=10)\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny\t\t\t\t\t\t\t =[]\r\n\t\t\t\t\t\tfor res_d, res_s in zip(support_list_dense, support_list_sparse):\r\n\t\t\t\t\t\t\t\tif tuple(res_d) not in support_list:\r\n\t\t\t\t\t\t\t\t\t\tsupport_list += [tuple(res_d)]\r\n\t\t\t\t\t\t\t\tif tuple(res_s) not in support_list:\r\n\t\t\t\t\t\t\t\t\t\tsupport_list += [tuple(res_s)]\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any]\t\t\t\t\t\t\t =support_list[:10]\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any]\t\t\t\t\t\t\t ='''

''' + '''

'''.join([res[-1] for res in support_list])\r\n\t\t\t\telse:\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny\t\t\t\t\t\t\t =make_support(question, source=wiki_source, method=index_type, n_results=10)\r\n\t\tif action in [0, 3]:\r\n\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny\t\t\t\t\t\t\t =answer_question(\r\n\t\t\t\t question_doc,\r\n\t\t\t\t sas_model,\r\n\t\t\t\t sas_tokenizer,\r\n\t\t\t\t min_len=min_len,\r\n\t\t\t\t max_len=int(max_len),\r\n\t\t\t\t sampling=(sampled == 'sampled'),\r\n\t\t\t\t n_beams=n_beams,\r\n\t\t\t\t top_p=top_p,\r\n\t\t\t\t temp=temp,\r\n\t\t\t\t)\r\n\t\t\t\tst.markdown('### The model generated answer is:')\r\n\t\t\t\tst.write(answer)\r\n\t\tif action in [0, 1, 3] and wiki_source != \"none\":\r\n\t\t\t\tst.markdown('--- \\n ### The model is drawing information from the following Wikipedia passages:')\r\n\t\t\t\tfor i, res in enumerate(support_list):\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr\t\t\t\t\t\t\t ='''https://en.wikipedia.org/wiki/{}'''.format(res[0].replace(' ', '_'))\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple\t\t\t\t\t\t\t =res[1].strip()\r\n\t\t\t\t\t\tif sec_titles == \"\":\r\n\t\t\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any]\t\t\t\t\t\t\t ='''[{}]({})'''.format(res[0], wiki_url)\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any]\t\t\t\t\t\t\t =sec_titles.split(' & ')\r\n\t\t\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any]\t\t\t\t\t\t\t =''' & '''.join(\r\n\t\t\t\t\t\t\t\t ['[{}]({}#{})'.format(sec.strip(), wiki_url, sec.strip().replace(' ', '_')) for sec in sec_list]\r\n\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\tst.markdown(\r\n\t\t\t\t\t\t '{0:02d} - **Article**: {1:<18}
_Section_: {2}'.format(i + 1, res[0], sections),\r\n\t\t\t\t\t\t unsafe_allow_html=True,\r\n\t\t\t\t\t\t)\r\n\t\t\t\t\t\tif show_passages:\r\n\t\t\t\t\t\t\t\tst.write(\r\n\t\t\t\t\t\t\t\t '> ' + res[-1] + '', unsafe_allow_html=True\r\n\t\t\t\t\t\t\t\t)\r\n\t\tif action in [2, 3]:\r\n\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint\t\t\t\t\t\t\t =find_nearest_training(question)\r\n\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint\t\t\t\t\t\t\t =nn_train_list[0]\r\n\t\t\t\tst.markdown(\r\n\t\t\t\t '--- \\n ### The most similar question in the ELI5 training set was: \\n\\n {}'.format(train_exple['title'])\r\n\t\t\t\t)\r\n\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any]\t\t\t\t\t\t\t =[\r\n\t\t\t\t '''{}. {}'''.format(i + 1, ' \\n'.join([line.strip() for line in ans.split('\\n') if line.strip() != '']))\r\n\t\t\t\t for i, (ans, sc) in enumerate(zip(train_exple['answers']['text'], train_exple['answers']['score']))\r\n\t\t\t\t if i == 0 or sc > 2\r\n\t\t\t\t]\r\n\t\t\t\tst.markdown('##### Its answers were: \\n\\n {}'.format('\\n'.join(answers_st)))\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny\t\t\t\t\t\t\t ='''\n---\n\n**Disclaimer**\n\n*The intent of this app is to provide some (hopefully entertaining) insights into the behavior of a current LFQA system.\nEvaluating biases of such a model and ensuring factual generations are still very much open research problems.\nTherefore, until some significant progress is achieved, we caution against using the generated answers for practical purposes.*\n'''\r\n\r\nst.sidebar.markdown(disclaimer, unsafe_allow_html=True)\r\n\r\n"},"code_codestyle":{"kind":"number","value":719,"string":"719"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif p < 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"p should not be less than 2!\"\"\" )\r\n\t\t\t\t\t\telif p == 
2:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn True\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t4\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t(1 << p) - 1\r\n\t\t\t\t\t\tfor _ in range(p - 2 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t((s * s) - 2) % m\r\n\t\t\t\t\t\treturn s == 0\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tprint(lucas_lehmer_test(7))\r\n\t\tprint(lucas_lehmer_test(11))\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305147,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import Dict, List, Optional, Tuple, Union\r\n\r\nimport numpy as np\r\n\r\nfrom ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict\r\nfrom ...image_transforms import (\r\n center_crop,\r\n get_resize_output_image_size,\r\n normalize,\r\n rescale,\r\n resize,\r\n to_channel_dimension_format,\r\n)\r\nfrom ...image_utils import (\r\n IMAGENET_STANDARD_MEAN,\r\n IMAGENET_STANDARD_STD,\r\n ChannelDimension,\r\n ImageInput,\r\n PILImageResampling,\r\n make_list_of_images,\r\n to_numpy_array,\r\n valid_images,\r\n)\r\nfrom ...utils import TensorType, is_torch_available, is_torch_tensor, logging\r\n\r\n\r\nif is_torch_available():\r\n import torch\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =logging.get_logger(__name__)\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\t_a ):\r\n _UpperCamelCase: List[str] =\t\t[\"\"\"pixel_values\"\"\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n def __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = PILImageResampling.BILINEAR\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, lowercase_ = 1 / 255\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tNone:\r\n 
super().__init__(**snake_case_ )\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tsize if size is not None else {\"\"\"shortest_edge\"\"\": 256}\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tget_size_dict(snake_case_\t\t\t\t\t\t, default_to_square=snake_case_ )\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tcrop_size if crop_size is not None else {\"\"\"height\"\"\": 224, \"\"\"width\"\"\": 224}\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tget_size_dict(snake_case_\t\t\t\t\t\t, param_name=\"\"\"crop_size\"\"\" )\r\n lowerCAmelCase : Any\t\t\t\t =\t\t\t\tdo_resize\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tsize\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tresample\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\tdo_center_crop\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tcrop_size\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tdo_rescale\r\n lowerCAmelCase : Any\t\t\t\t =\t\t\t\trescale_factor\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tdo_normalize\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\timage_mean if image_mean is not None else IMAGENET_STANDARD_MEAN\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\timage_std if image_std is not None else IMAGENET_STANDARD_STD\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = PILImageResampling.BICUBIC\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tnp.ndarray:\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\tget_size_dict(snake_case_\t\t\t\t\t\t, default_to_square=snake_case_ )\r\n if \"shortest_edge\" not in size:\r\n raise ValueError(f\"\"\"The `size` parameter must contain the key `shortest_edge`. 
Got {size.keys()}\"\"\" )\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tget_resize_output_image_size(snake_case_\t\t\t\t\t\t, size=size[\"\"\"shortest_edge\"\"\"]\t\t\t\t\t\t, default_to_square=snake_case_ )\r\n return resize(snake_case_\t\t\t\t\t\t, size=snake_case_\t\t\t\t\t\t, resample=snake_case_\t\t\t\t\t\t, data_format=snake_case_\t\t\t\t\t\t, **snake_case_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tnp.ndarray:\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tget_size_dict(snake_case_ )\r\n if \"height\" not in size or \"width\" not in size:\r\n raise ValueError(f\"\"\"The `size` parameter must contain the keys `height` and `width`. Got {size.keys()}\"\"\" )\r\n return center_crop(snake_case_\t\t\t\t\t\t, size=(size[\"\"\"height\"\"\"], size[\"\"\"width\"\"\"])\t\t\t\t\t\t, data_format=snake_case_\t\t\t\t\t\t, **snake_case_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tnp.ndarray:\r\n return rescale(snake_case_\t\t\t\t\t\t, scale=snake_case_\t\t\t\t\t\t, data_format=snake_case_\t\t\t\t\t\t, **snake_case_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tnp.ndarray:\r\n return normalize(snake_case_\t\t\t\t\t\t, mean=snake_case_\t\t\t\t\t\t, std=snake_case_\t\t\t\t\t\t, data_format=snake_case_\t\t\t\t\t\t, **snake_case_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, 
lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = ChannelDimension.FIRST\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tTuple:\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdo_resize if do_resize is not None else self.do_resize\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tsize if size is not None else self.size\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tget_size_dict(snake_case_\t\t\t\t\t\t, default_to_square=snake_case_ )\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tresample if resample is not None else self.resample\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tdo_center_crop if do_center_crop is not None else self.do_center_crop\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tcrop_size if crop_size is not None else self.crop_size\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tget_size_dict(snake_case_\t\t\t\t\t\t, param_name=\"\"\"crop_size\"\"\" )\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdo_rescale if do_rescale is not None else self.do_rescale\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\trescale_factor if rescale_factor is not None else self.rescale_factor\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tdo_normalize if do_normalize is not None else self.do_normalize\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\timage_mean if image_mean is not None else self.image_mean\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\timage_std if image_std is not None else self.image_std\r\n\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmake_list_of_images(snake_case_ )\r\n\r\n if not valid_images(snake_case_ ):\r\n raise ValueError(\r\n \"\"\"Invalid image type. 
Must be of type PIL.Image.Image, numpy.ndarray, \"\"\"\r\n \"\"\"torch.Tensor, tf.Tensor or jax.ndarray.\"\"\" )\r\n\r\n if do_resize and size is None:\r\n raise ValueError(\"\"\"Size must be specified if do_resize is True.\"\"\" )\r\n\r\n if do_center_crop and crop_size is None:\r\n raise ValueError(\"\"\"Crop size must be specified if do_center_crop is True.\"\"\" )\r\n\r\n if do_rescale and rescale_factor is None:\r\n raise ValueError(\"\"\"Rescale factor must be specified if do_rescale is True.\"\"\" )\r\n\r\n if do_normalize and (image_mean is None or image_std is None):\r\n raise ValueError(\"\"\"Image mean and std must be specified if do_normalize is True.\"\"\" )\r\n\r\n # All transformations expect numpy arrays.\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[to_numpy_array(snake_case_ ) for image in images]\r\n\r\n if do_resize:\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[self.resize(image=snake_case_\t\t\t\t\t\t, size=snake_case_\t\t\t\t\t\t, resample=snake_case_ ) for image in images]\r\n\r\n if do_center_crop:\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t[self.center_crop(image=snake_case_\t\t\t\t\t\t, size=snake_case_ ) for image in images]\r\n\r\n if do_rescale:\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[self.rescale(image=snake_case_\t\t\t\t\t\t, scale=snake_case_ ) for image in images]\r\n\r\n if do_normalize:\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\t[self.normalize(image=snake_case_\t\t\t\t\t\t, mean=snake_case_\t\t\t\t\t\t, std=snake_case_ ) for image in images]\r\n\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[to_channel_dimension_format(snake_case_\t\t\t\t\t\t, snake_case_ ) for image in images]\r\n\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t{\"\"\"pixel_values\"\"\": images}\r\n return BatchFeature(data=snake_case_\t\t\t\t\t\t, tensor_type=snake_case_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = None ) ->\t\t\t\tUnion[str, Any]:\r\n 
lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\toutputs.logits\r\n\r\n # Resize logits and compute semantic segmentation maps\r\n if target_sizes is not None:\r\n if len(snake_case_ ) != len(snake_case_ ):\r\n raise ValueError(\r\n \"\"\"Make sure that you pass in as many target sizes as the batch dimension of the logits\"\"\" )\r\n\r\n if is_torch_tensor(snake_case_ ):\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttarget_sizes.numpy()\r\n\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\t[]\r\n\r\n for idx in range(len(snake_case_ ) ):\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.nn.functional.interpolate(\r\n logits[idx].unsqueeze(dim=0 )\t\t\t\t\t\t, size=target_sizes[idx]\t\t\t\t\t\t, mode=\"\"\"bilinear\"\"\"\t\t\t\t\t\t, align_corners=snake_case_ )\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tresized_logits[0].argmax(dim=0 )\r\n semantic_segmentation.append(snake_case_ )\r\n else:\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tlogits.argmax(dim=1 )\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\t[semantic_segmentation[i] for i in range(semantic_segmentation.shape[0] )]\r\n\r\n return semantic_segmentation\r\n\r\n"},"code_codestyle":{"kind":"number","value":720,"string":"720"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport random\r\nimport unittest\r\n\r\nimport torch\r\n\r\nfrom diffusers import IFImgaImgSuperResolutionPipeline\r\nfrom diffusers.utils import floats_tensor\r\nfrom diffusers.utils.import_utils import is_xformers_available\r\nfrom diffusers.utils.testing_utils import skip_mps, torch_device\r\n\r\nfrom ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS\r\nfrom ..test_pipelines_common import PipelineTesterMixin\r\nfrom . 
import IFPipelineTesterMixin\r\n\r\n\r\n\r\n\r\n\r\n\r\n@skip_mps\r\nclass _a (\t\t\tsnake_case_ , snake_case_ , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: Optional[Any] =\t\tIFImgaImgSuperResolutionPipeline\r\n\t\t\t_UpperCamelCase: int =\t\tTEXT_GUIDED_IMAGE_VARIATION_PARAMS - {\"width\", \"height\"}\r\n\t\t\t_UpperCamelCase: Optional[int] =\t\tTEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS.union({\"original_image\"} )\r\n\t\t\t_UpperCamelCase: List[str] =\t\tPipelineTesterMixin.required_optional_params - {\"latents\"}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self._get_superresolution_dummy_components()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=0 ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tif str(lowercase_ ).startswith(\"\"\"mps\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttorch.manual_seed(lowercase_ )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttorch.Generator(device=lowercase_ ).manual_seed(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tfloats_tensor((1, 3, 32, 32)\t\t\t\t\t\t, rng=random.Random(lowercase_ ) ).to(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tfloats_tensor((1, 3, 16, 16)\t\t\t\t\t\t, rng=random.Random(lowercase_ ) ).to(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t \"\"\"prompt\"\"\": \"\"\"A painting of a squirrel eating a burger\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"image\"\"\": image,\r\n\t\t\t\t\t\t\t\t\t \"\"\"original_image\"\"\": original_image,\r\n\t\t\t\t\t\t\t\t\t \"\"\"generator\"\"\": generator,\r\n\t\t\t\t\t\t\t\t\t \"\"\"num_inference_steps\"\"\": 2,\r\n\t\t\t\t\t\t\t\t\t \"\"\"output_type\"\"\": 
\"\"\"numpy\"\"\",\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\t\t\treturn inputs\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skipIf(\r\n\t\t\t torch_device != \"\"\"cuda\"\"\" or not is_xformers_available()\t\t\t\t\t\t, reason=\"\"\"XFormers attention is only available with CUDA and `xformers` installed\"\"\"\t\t\t\t\t\t, )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tself._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tself._test_save_load_optional_components()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skipIf(torch_device != \"\"\"cuda\"\"\"\t\t\t\t\t\t, reason=\"\"\"float16 requires CUDA\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\t# Due to non-determinism in save load of the hf-internal-testing/tiny-random-t5 text encoder\r\n\t\t\t\t\t\t\t\t\tsuper().test_save_load_floataa(expected_max_diff=1e-1 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tself._test_attention_slicing_forward_pass(expected_max_diff=1e-2 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tself._test_save_load_local()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tself._test_inference_batch_single_identical(\r\n\t\t\t\t\t\t\t\t\t expected_max_diff=1e-2\t\t\t\t\t\t, )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305148,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom dataclasses import dataclass, field\r\nfrom typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Union\r\n\r\nimport pyarrow as pa\r\n\r\n\r\nif TYPE_CHECKING:\r\n from .features import 
FeatureType\r\n\r\n\r\n\r\n\r\n\r\n\r\n@dataclass\r\nclass _a :\r\n _UpperCamelCase: str =\t\t42\r\n _UpperCamelCase: int =\t\tNone\r\n # Automatically constructed\r\n _UpperCamelCase: Any =\t\t\"dict\"\r\n _UpperCamelCase: Optional[Any] =\t\tNone\r\n _UpperCamelCase: Optional[Any] =\t\tfield(default=\"Translation\" , init=SCREAMING_SNAKE_CASE__ , repr=SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def __call__(\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n return pa.struct({lang: pa.string() for lang in sorted(self.languages )} )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[\"FeatureType\", Dict[str, \"FeatureType\"]]:\r\n from .features import Value\r\n\r\n return {k: Value(\"\"\"string\"\"\" ) for k in sorted(self.languages )}\r\n\r\n\r\n\r\n\r\n\r\n\r\n@dataclass\r\nclass _a :\r\n _UpperCamelCase: Tuple =\t\tNone\r\n _UpperCamelCase: Tuple =\t\tNone\r\n _UpperCamelCase: str =\t\tNone\r\n # Automatically constructed\r\n _UpperCamelCase: Union[str, Any] =\t\t\"dict\"\r\n _UpperCamelCase: Union[str, Any] =\t\tNone\r\n _UpperCamelCase: Optional[int] =\t\tfield(default=\"TranslationVariableLanguages\" , init=SCREAMING_SNAKE_CASE__ , repr=SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tsorted(set(self.languages ) ) if self.languages else None\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tlen(self.languages ) if self.languages else None\r\n\r\n\r\n\r\n\r\n\r\n\r\n def __call__(\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n return pa.struct({\"\"\"language\"\"\": pa.list_(pa.string() ), \"\"\"translation\"\"\": pa.list_(pa.string() )} )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tTuple:\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\tset(self.languages )\r\n if self.languages and set(_lowercase ) - lang_set:\r\n raise ValueError(\r\n f\"\"\"Some languages in example ({', 
'.join(sorted(set(_lowercase ) - lang_set ) )}) are not in valid set ({', '.join(_lowercase )}).\"\"\" )\r\n\r\n # Convert dictionary into tuples, splitting out cases where there are\r\n # multiple translations for a single language.\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[]\r\n for lang, text in translation_dict.items():\r\n if isinstance(_lowercase\t\t\t\t\t\t, _lowercase ):\r\n translation_tuples.append((lang, text) )\r\n else:\r\n translation_tuples.extend([(lang, el) for el in text] )\r\n\r\n # Ensure translations are in ascending order by language code.\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tzip(*sorted(_lowercase ) )\r\n\r\n return {\"language\": languages, \"translation\": translations}\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[\"FeatureType\", Dict[str, \"FeatureType\"]]:\r\n from .features import Sequence, Value\r\n\r\n return {\r\n \"language\": Sequence(Value(\"\"\"string\"\"\" ) ),\r\n \"translation\": Sequence(Value(\"\"\"string\"\"\" ) ),\r\n }\r\n\r\n"},"code_codestyle":{"kind":"number","value":721,"string":"721"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...utils import logging\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: Tuple =\t\t\"llama\"\r\n\t\t\t_UpperCamelCase: List[str] =\t\t[\"past_key_values\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=32000\t\t\t\t\t\t, lowercase_=4096\t\t\t\t\t\t, lowercase_=11008\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=\"silu\"\t\t\t\t\t\t, lowercase_=2048\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-6\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, 
lowercase_=1\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tvocab_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tmax_position_embeddings\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\thidden_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tintermediate_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnum_hidden_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_attention_heads\r\n\r\n\t\t\t\t\t\t\t\t\t# for backward compatibility\r\n\t\t\t\t\t\t\t\t\tif num_key_value_heads is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_attention_heads\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_key_value_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\thidden_act\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tinitializer_range\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\trms_norm_eps\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tpretraining_tp\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tuse_cache\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\trope_scaling\r\n\t\t\t\t\t\t\t\t\tself._rope_scaling_validation()\r\n\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(\r\n\t\t\t\t\t\t\t\t\t pad_token_id=lowercase_\t\t\t\t\t\t, bos_token_id=lowercase_\t\t\t\t\t\t, eos_token_id=lowercase_\t\t\t\t\t\t, tie_word_embeddings=lowercase_\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tif self.rope_scaling is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\r\n\t\t\t\t\t\t\t\t\tif not isinstance(self.rope_scaling\t\t\t\t\t\t, lowercase_ ) or len(self.rope_scaling ) != 
2:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"`rope_scaling` must be a dictionary with with two fields, `name` and `factor`, \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t f\"\"\"got {self.rope_scaling}\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.rope_scaling.get(\"\"\"type\"\"\"\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.rope_scaling.get(\"\"\"factor\"\"\"\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tif rope_scaling_type is None or rope_scaling_type not in [\"linear\", \"dynamic\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t f\"\"\"`rope_scaling`'s name field must be one of ['linear', 'dynamic'], got {rope_scaling_type}\"\"\" )\r\n\t\t\t\t\t\t\t\t\tif rope_scaling_factor is None or not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ) or rope_scaling_factor <= 1.0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(f\"\"\"`rope_scaling`'s factor field must be an float > 1, got {rope_scaling_factor}\"\"\" )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305149,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\n'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...utils import logging\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={\r\n 'tanreinama/GPTSAN-2.8B-spout_is_uniform': (\r\n 'https://huggingface.co/tanreinama/GPTSAN-2.8B-spout_is_uniform/resolve/main/config.json'\r\n ),\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\t_lowercase ):\r\n\t\t\t_UpperCamelCase: str =\t\t'''gptsan-japanese'''\r\n\t\t\t_UpperCamelCase: Dict =\t\t[\r\n\t\t\t '''past_key_values''',\r\n\t\t\t]\r\n\t\t\t_UpperCamelCase: Dict =\t\t{\r\n\t\t\t '''hidden_size''': 
'''d_model''',\r\n\t\t\t '''num_attention_heads''': '''num_heads''',\r\n\t\t\t '''num_hidden_layers''': '''num_layers''',\r\n\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=36000\t\t\t\t\t\t, lowercase_=1280\t\t\t\t\t\t, lowercase_=1024\t\t\t\t\t\t, lowercase_=8192\t\t\t\t\t\t, lowercase_=4096\t\t\t\t\t\t, lowercase_=128\t\t\t\t\t\t, lowercase_=10\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, lowercase_=16\t\t\t\t\t\t, lowercase_=16\t\t\t\t\t\t, lowercase_=128\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=1e-5\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=\"float32\"\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=0.0_0_2\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=35998\t\t\t\t\t\t, lowercase_=35995\t\t\t\t\t\t, lowercase_=35999\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tvocab_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tmax_position_embeddings\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\td_model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\td_ff\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\td_ext\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\td_spout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_switch_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnum_ext_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tnum_switch_layers + num_ext_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tnum_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnum_experts\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\texpert_capacity\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t 
=\t\t\t\tdropout_rate\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tlayer_norm_epsilon\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\trouter_bias\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\trouter_jitter_noise\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\trouter_dtype\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\trouter_ignore_padding_tokens\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\toutput_hidden_states\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\toutput_attentions\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tinitializer_factor\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\toutput_router_logits\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tuse_cache\r\n\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(\r\n\t\t\t\t\t\t\t\t\t separator_token_id=A_\t\t\t\t\t\t, pad_token_id=A_\t\t\t\t\t\t, eos_token_id=A_\t\t\t\t\t\t, **A_\t\t\t\t\t\t, )\r\n\r\n"},"code_codestyle":{"kind":"number","value":700,"string":"700"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom collections import OrderedDict\r\nfrom typing import Mapping\r\n\r\nfrom packaging import version\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...onnx import OnnxConfig\r\nfrom ...utils import logging\r\nfrom ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={\r\n 'microsoft/swin-tiny-patch4-window7-224': (\r\n 'https://huggingface.co/microsoft/swin-tiny-patch4-window7-224/resolve/main/config.json'\r\n ),\r\n # See all Swin models at https://huggingface.co/models?filter=swin\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ , snake_case_ ):\r\n\t\t\t_UpperCamelCase: int =\t\t\"swin\"\r\n\r\n\t\t\t_UpperCamelCase: str 
=\t\t{\r\n\t\t\t \"num_attention_heads\": \"num_heads\",\r\n\t\t\t \"num_hidden_layers\": \"num_layers\",\r\n\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=224\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=96\t\t\t\t\t\t, lowercase_=[2, 2, 6, 2]\t\t\t\t\t\t, lowercase_=[3, 6, 12, 24]\t\t\t\t\t\t, lowercase_=7\t\t\t\t\t\t, lowercase_=4.0\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, lowercase_=\"gelu\"\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-5\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(**lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\timage_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tpatch_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tnum_channels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tembed_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tdepths\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tlen(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\twindow_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tmlp_ratio\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tqkv_bias\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\thidden_dropout_prob\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tattention_probs_dropout_prob\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tdrop_path_rate\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\thidden_act\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t 
=\t\t\t\tuse_absolute_embeddings\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tlayer_norm_eps\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tinitializer_range\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tencoder_stride\r\n\t\t\t\t\t\t\t\t\t# we set the hidden_size attribute in order to make Swin work with VisionEncoderDecoderModel\r\n\t\t\t\t\t\t\t\t\t# this indicates the channel dimension after the last stage of the model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tint(embed_dim * 2 ** (len(lowercase_ ) - 1) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[\"\"\"stem\"\"\"] + [f\"\"\"stage{idx}\"\"\" for idx in range(1\t\t\t\t\t\t, len(lowercase_ ) + 1 )]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tget_aligned_output_features_output_indices(\r\n\t\t\t\t\t\t\t\t\t out_features=lowercase_\t\t\t\t\t\t, out_indices=lowercase_\t\t\t\t\t\t, stage_names=self.stage_names )\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: int =\t\tversion.parse(\"1.11\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tMapping[str, Mapping[int, str]]:\r\n\t\t\t\t\t\t\t\t\treturn OrderedDict(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t (\"\"\"pixel_values\"\"\", {0: \"\"\"batch\"\"\", 1: \"\"\"num_channels\"\"\", 2: \"\"\"height\"\"\", 3: \"\"\"width\"\"\"}),\r\n\t\t\t\t\t\t\t\t\t ] )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tfloat:\r\n\t\t\t\t\t\t\t\t\treturn 1e-4\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305150,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport re\r\nfrom pathlib import Path\r\nfrom unittest import TestCase\r\n\r\nimport pytest\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.integration\r\nclass _a (\t\t\tlowercase__ 
):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tList[str]:\r\n\r\n\t\t\t\t\t\t\t\t\twith open(lowercase_\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as input_file:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tre.compile(R\"\"\"(?!.*\\b(?:encoding|rb|w|wb|w+|wb+|ab|ab+)\\b)(?<=\\s)(open)\\((.*)\\)\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tinput_file.read()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tregexp.search(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\treturn match\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tTuple:\r\n\r\n\t\t\t\t\t\t\t\t\twith open(lowercase_\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as input_file:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tre.compile(R\"\"\"#[^\\r\\n]*print\\(|\\\"[^\\r\\n]*print\\(|\\\"\\\"\\\".*?print\\(.*?\\\"\\\"\\\"|(print\\()\"\"\"\t\t\t\t\t\t, re.DOTALL )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tinput_file.read()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# use `re.finditer` to handle the case where the ignored groups would be matched first by `re.search`\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tregexp.finditer(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[match for match in matches if match is not None and match.group(1 ) is not None]\r\n\t\t\t\t\t\t\t\t\treturn matches[0] if matches else None\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tPath(\"\"\"./datasets\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tlist(dataset_paths.absolute().glob(\"\"\"**/*.py\"\"\" ) )\r\n\r\n\t\t\t\t\t\t\t\t\tfor dataset in 
dataset_files:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif self._no_encoding_on_file_open(str(lowercase_ ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise AssertionError(f\"\"\"open(...) must use utf-8 encoding in {dataset}\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tPath(\"\"\"./datasets\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tlist(dataset_paths.absolute().glob(\"\"\"**/*.py\"\"\" ) )\r\n\r\n\t\t\t\t\t\t\t\t\tfor dataset in dataset_files:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif self._no_print_statements(str(lowercase_ ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise AssertionError(f\"\"\"print statement found in {dataset}. Use datasets.logger/logging instead.\"\"\" )\r\n\r\n"},"code_codestyle":{"kind":"number","value":701,"string":"701"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr ={\r\n 'Pillow': 'Pillow<10.0.0',\r\n 'accelerate': 'accelerate>=0.20.3',\r\n 'av': 'av==9.2.0',\r\n 'beautifulsoup4': 'beautifulsoup4',\r\n 'black': 'black~=23.1',\r\n 'codecarbon': 'codecarbon==1.2.0',\r\n 'cookiecutter': 'cookiecutter==1.7.3',\r\n 'dataclasses': 'dataclasses',\r\n 'datasets': 'datasets!=2.5.0',\r\n 'decord': 'decord==0.6.0',\r\n 'deepspeed': 'deepspeed>=0.9.3',\r\n 'diffusers': 'diffusers',\r\n 'dill': 'dill<0.3.5',\r\n 'evaluate': 'evaluate>=0.2.0',\r\n 'fairscale': 'fairscale>0.3',\r\n 'faiss-cpu': 'faiss-cpu',\r\n 'fastapi': 'fastapi',\r\n 'filelock': 'filelock',\r\n 'flax': 'flax>=0.4.1,<=0.7.0',\r\n 'ftfy': 'ftfy',\r\n 'fugashi': 'fugashi>=1.0',\r\n 'GitPython': 'GitPython<3.1.19',\r\n 'hf-doc-builder': 'hf-doc-builder>=0.3.0',\r\n 'huggingface-hub': 'huggingface-hub>=0.14.1,<1.0',\r\n 'importlib_metadata': 'importlib_metadata',\r\n 'ipadic': 'ipadic>=1.0.0,<2.0',\r\n 'isort': 'isort>=5.5.4',\r\n 'jax': 'jax>=0.2.8,!=0.3.2,<=0.4.13',\r\n 'jaxlib': 
'jaxlib>=0.1.65,<=0.4.13',\r\n 'jieba': 'jieba',\r\n 'kenlm': 'kenlm',\r\n 'keras-nlp': 'keras-nlp>=0.3.1',\r\n 'librosa': 'librosa',\r\n 'nltk': 'nltk',\r\n 'natten': 'natten>=0.14.6',\r\n 'numpy': 'numpy>=1.17',\r\n 'onnxconverter-common': 'onnxconverter-common',\r\n 'onnxruntime-tools': 'onnxruntime-tools>=1.4.2',\r\n 'onnxruntime': 'onnxruntime>=1.4.0',\r\n 'opencv-python': 'opencv-python',\r\n 'optuna': 'optuna',\r\n 'optax': 'optax>=0.0.8,<=0.1.4',\r\n 'packaging': 'packaging>=20.0',\r\n 'parameterized': 'parameterized',\r\n 'phonemizer': 'phonemizer',\r\n 'protobuf': 'protobuf',\r\n 'psutil': 'psutil',\r\n 'pyyaml': 'pyyaml>=5.1',\r\n 'pydantic': 'pydantic<2',\r\n 'pytest': 'pytest>=7.2.0',\r\n 'pytest-timeout': 'pytest-timeout',\r\n 'pytest-xdist': 'pytest-xdist',\r\n 'python': 'python>=3.8.0',\r\n 'ray[tune]': 'ray[tune]',\r\n 'regex': 'regex!=2019.12.17',\r\n 'requests': 'requests',\r\n 'rhoknp': 'rhoknp>=1.1.0,<1.3.1',\r\n 'rjieba': 'rjieba',\r\n 'rouge-score': 'rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1',\r\n 'ruff': 'ruff>=0.0.241,<=0.0.259',\r\n 'sacrebleu': 'sacrebleu>=1.4.12,<2.0.0',\r\n 'sacremoses': 'sacremoses',\r\n 'safetensors': 'safetensors>=0.3.1',\r\n 'sagemaker': 'sagemaker>=2.31.0',\r\n 'scikit-learn': 'scikit-learn',\r\n 'sentencepiece': 'sentencepiece>=0.1.91,!=0.1.92',\r\n 'sigopt': 'sigopt',\r\n 'starlette': 'starlette',\r\n 'sudachipy': 'sudachipy>=0.6.6',\r\n 'sudachidict_core': 'sudachidict_core>=20220729',\r\n 'tensorflow-cpu': 'tensorflow-cpu>=2.6,<2.14',\r\n 'tensorflow': 'tensorflow>=2.6,<2.14',\r\n 'tensorflow-text': 'tensorflow-text<2.14',\r\n 'tf2onnx': 'tf2onnx',\r\n 'timeout-decorator': 'timeout-decorator',\r\n 'timm': 'timm',\r\n 'tokenizers': 'tokenizers>=0.11.1,!=0.11.3,<0.14',\r\n 'torch': 'torch>=1.9,!=1.12.0',\r\n 'torchaudio': 'torchaudio',\r\n 'torchvision': 'torchvision',\r\n 'pyctcdecode': 'pyctcdecode>=0.4.0',\r\n 'tqdm': 'tqdm>=4.27',\r\n 'unidic': 'unidic>=1.0.2',\r\n 'unidic_lite': 'unidic_lite>=1.0.7',\r\n 
'urllib3': 'urllib3<2.0.0',\r\n 'uvicorn': 'uvicorn',\r\n}\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305151,"cells":{"code":{"kind":"string","value":"\r\n\r\nfrom itertools import permutations\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif num[3] % 2 != 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn False\r\n\r\n\t\t\t\t\t\tif (num[2] + num[3] + num[4]) % 3 != 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn False\r\n\r\n\t\t\t\t\t\tif num[5] % 5 != 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn False\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[7, 1_1, 1_3, 1_7]\r\n\t\t\t\t\t\tfor i, test in enumerate(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tif (num[i + 4] * 1_0_0 + num[i + 5] * 1_0 + num[i + 6]) % test != 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn False\r\n\t\t\t\t\t\treturn True\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ = 1_0 ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\treturn sum(\r\n\t\t\t\t\t\t int(\"\"\"\"\"\".join(map(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ) ) )\r\n\t\t\t\t\t\t for num in permutations(range(SCREAMING_SNAKE_CASE__ ) )\r\n\t\t\t\t\t\t if is_substring_divisible(SCREAMING_SNAKE_CASE__ ) )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tprint(F'''{solution() = }''')\r\n\r\n"},"code_codestyle":{"kind":"number","value":702,"string":"702"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import TYPE_CHECKING\r\n\r\nfrom ...utils import (\r\n OptionalDependencyNotAvailable,\r\n _LazyModule,\r\n is_flax_available,\r\n is_tf_available,\r\n is_tokenizers_available,\r\n is_torch_available,\r\n)\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] ={\r\n 'configuration_roformer': 
['ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'RoFormerConfig', 'RoFormerOnnxConfig'],\r\n 'tokenization_roformer': ['RoFormerTokenizer'],\r\n}\r\n\r\ntry:\r\n\t\tif not is_tokenizers_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =['RoFormerTokenizerFast']\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =[\r\n\t\t 'ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'RoFormerForCausalLM',\r\n\t\t 'RoFormerForMaskedLM',\r\n\t\t 'RoFormerForMultipleChoice',\r\n\t\t 'RoFormerForQuestionAnswering',\r\n\t\t 'RoFormerForSequenceClassification',\r\n\t\t 'RoFormerForTokenClassification',\r\n\t\t 'RoFormerLayer',\r\n\t\t 'RoFormerModel',\r\n\t\t 'RoFormerPreTrainedModel',\r\n\t\t 'load_tf_weights_in_roformer',\r\n\t\t]\r\n\r\n\r\ntry:\r\n\t\tif not is_tf_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =[\r\n\t\t 'TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'TFRoFormerForCausalLM',\r\n\t\t 'TFRoFormerForMaskedLM',\r\n\t\t 'TFRoFormerForMultipleChoice',\r\n\t\t 'TFRoFormerForQuestionAnswering',\r\n\t\t 'TFRoFormerForSequenceClassification',\r\n\t\t 'TFRoFormerForTokenClassification',\r\n\t\t 'TFRoFormerLayer',\r\n\t\t 'TFRoFormerModel',\r\n\t\t 'TFRoFormerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\ntry:\r\n\t\tif not is_flax_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =[\r\n\t\t 'FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'FlaxRoFormerForMaskedLM',\r\n\t\t 'FlaxRoFormerForMultipleChoice',\r\n\t\t 
'FlaxRoFormerForQuestionAnswering',\r\n\t\t 'FlaxRoFormerForSequenceClassification',\r\n\t\t 'FlaxRoFormerForTokenClassification',\r\n\t\t 'FlaxRoFormerModel',\r\n\t\t 'FlaxRoFormerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, RoFormerConfig, RoFormerOnnxConfig\r\n\t\tfrom .tokenization_roformer import RoFormerTokenizer\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_tokenizers_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .tokenization_roformer_fast import RoFormerTokenizerFast\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_roformer import (\r\n\t\t\t\t ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t RoFormerForCausalLM,\r\n\t\t\t\t RoFormerForMaskedLM,\r\n\t\t\t\t RoFormerForMultipleChoice,\r\n\t\t\t\t RoFormerForQuestionAnswering,\r\n\t\t\t\t RoFormerForSequenceClassification,\r\n\t\t\t\t RoFormerForTokenClassification,\r\n\t\t\t\t RoFormerLayer,\r\n\t\t\t\t RoFormerModel,\r\n\t\t\t\t RoFormerPreTrainedModel,\r\n\t\t\t\t load_tf_weights_in_roformer,\r\n\t\t\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_tf_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_tf_roformer import (\r\n\t\t\t\t TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t TFRoFormerForCausalLM,\r\n\t\t\t\t TFRoFormerForMaskedLM,\r\n\t\t\t\t TFRoFormerForMultipleChoice,\r\n\t\t\t\t TFRoFormerForQuestionAnswering,\r\n\t\t\t\t TFRoFormerForSequenceClassification,\r\n\t\t\t\t TFRoFormerForTokenClassification,\r\n\t\t\t\t TFRoFormerLayer,\r\n\t\t\t\t TFRoFormerModel,\r\n\t\t\t\t 
TFRoFormerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_flax_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_flax_roformer import (\r\n\t\t\t\t FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t FlaxRoFormerForMaskedLM,\r\n\t\t\t\t FlaxRoFormerForMultipleChoice,\r\n\t\t\t\t FlaxRoFormerForQuestionAnswering,\r\n\t\t\t\t FlaxRoFormerForSequenceClassification,\r\n\t\t\t\t FlaxRoFormerForTokenClassification,\r\n\t\t\t\t FlaxRoFormerModel,\r\n\t\t\t\t FlaxRoFormerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =_LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305152,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import TYPE_CHECKING\r\n\r\nfrom ...utils import (\r\n OptionalDependencyNotAvailable,\r\n _LazyModule,\r\n is_sentencepiece_available,\r\n is_tokenizers_available,\r\n is_torch_available,\r\n)\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple ={\"\"\"configuration_reformer\"\"\": [\"\"\"REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP\"\"\", \"\"\"ReformerConfig\"\"\"]}\r\n\r\ntry:\r\n\t\tif not is_sentencepiece_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str] =[\"\"\"ReformerTokenizer\"\"\"]\r\n\r\ntry:\r\n\t\tif not is_tokenizers_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] =[\"\"\"ReformerTokenizerFast\"\"\"]\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise 
OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str] =[\r\n\t\t \"\"\"REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST\"\"\",\r\n\t\t \"\"\"ReformerAttention\"\"\",\r\n\t\t \"\"\"ReformerForMaskedLM\"\"\",\r\n\t\t \"\"\"ReformerForQuestionAnswering\"\"\",\r\n\t\t \"\"\"ReformerForSequenceClassification\"\"\",\r\n\t\t \"\"\"ReformerLayer\"\"\",\r\n\t\t \"\"\"ReformerModel\"\"\",\r\n\t\t \"\"\"ReformerModelWithLMHead\"\"\",\r\n\t\t \"\"\"ReformerPreTrainedModel\"\"\",\r\n\t\t]\r\n\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_reformer import REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, ReformerConfig\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_sentencepiece_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .tokenization_reformer import ReformerTokenizer\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_tokenizers_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .tokenization_reformer_fast import ReformerTokenizerFast\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_reformer import (\r\n\t\t\t\t REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t ReformerAttention,\r\n\t\t\t\t ReformerForMaskedLM,\r\n\t\t\t\t ReformerForQuestionAnswering,\r\n\t\t\t\t ReformerForSequenceClassification,\r\n\t\t\t\t ReformerLayer,\r\n\t\t\t\t ReformerModel,\r\n\t\t\t\t ReformerModelWithLMHead,\r\n\t\t\t\t ReformerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =_LazyModule(__name__, globals()['__file__'], _import_structure, 
module_spec=__spec__)\r\n\r\n"},"code_codestyle":{"kind":"number","value":703,"string":"703"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\treturn int(input_a == input_a == 0 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tprint(\"\"\"Truth Table of NOR Gate:\"\"\" )\r\n\t\t\t\t\t\tprint(\"\"\"| Input 1 | Input 2 | Output |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 0 | 0 | {nor_gate(0 ,0 )} |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 0 | 1 | {nor_gate(0 ,1 )} |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 1 | 0 | {nor_gate(1 ,0 )} |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 1 | 1 | {nor_gate(1 ,1 )} |\"\"\" )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\t\tmain()\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305153,"cells":{"code":{"kind":"string","value":"\r\r\r\rimport logging\rimport os\rfrom dataclasses import dataclass, field\rfrom typing import Dict, Optional\r\rimport datasets\rimport numpy as np\rimport tensorflow as tf\r\rfrom transformers import (\r AutoConfig,\r AutoTokenizer,\r EvalPrediction,\r HfArgumentParser,\r PreTrainedTokenizer,\r TFAutoModelForSequenceClassification,\r TFTrainer,\r TFTrainingArguments,\r)\rfrom transformers.utils import logging as hf_logging\r\r\rhf_logging.set_verbosity_info()\rhf_logging.enable_default_handler()\rhf_logging.enable_explicit_format()\r\r\r\r\r\r\r\rdef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ = None ,):\r\r\r\t\t\t\t\t\t'''simple docstring'''\r\r\r\r\r\t\t\t\t\t\tlowerCAmelCase : 
str\t\t\t\t =\t\t\t\t{}\r\r\t\t\t\t\t\tif train_file is not None:\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[train_file]\r\t\t\t\t\t\tif eval_file is not None:\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[eval_file]\r\t\t\t\t\t\tif test_file is not None:\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[test_file]\r\r\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdatasets.load_dataset(\"\"\"csv\"\"\" ,data_files=_lowerCamelCase )\r\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tlist(ds[list(files.keys() )[0]].features.keys() )\r\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tfeatures_name.pop(_lowerCamelCase )\r\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tlist(set(ds[list(files.keys() )[0]][label_name] ) )\r\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t{label: i for i, label in enumerate(_lowerCamelCase )}\r\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttokenizer.model_input_names\r\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t{}\r\r\t\t\t\t\t\tif len(_lowerCamelCase ) == 1:\r\t\t\t\t\t\t\t\t\t\t\t\tfor k in files.keys():\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tds[k].map(\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t lambda SCREAMING_SNAKE_CASE__ : tokenizer.batch_encode_plus(\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t example[features_name[0]] ,truncation=_lowerCamelCase ,max_length=_lowerCamelCase ,padding=\"\"\"max_length\"\"\" ) ,batched=_lowerCamelCase ,)\r\t\t\t\t\t\telif len(_lowerCamelCase ) == 2:\r\t\t\t\t\t\t\t\t\t\t\t\tfor k in files.keys():\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tds[k].map(\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t lambda SCREAMING_SNAKE_CASE__ : tokenizer.batch_encode_plus(\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t (example[features_name[0]], example[features_name[1]]) ,truncation=_lowerCamelCase ,max_length=_lowerCamelCase ,padding=\"\"\"max_length\"\"\" 
,) ,batched=_lowerCamelCase ,)\r\r\t\t\t\t\t\tdef gen_train():\r\t\t\t\t\t\t\t\t\t\t\t\tfor ex in transformed_ds[datasets.Split.TRAIN]:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t{k: v for k, v in ex.items() if k in input_names}\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tlabelaid[ex[label_name]]\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tyield (d, label)\r\r\t\t\t\t\t\tdef gen_val():\r\t\t\t\t\t\t\t\t\t\t\t\tfor ex in transformed_ds[datasets.Split.VALIDATION]:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{k: v for k, v in ex.items() if k in input_names}\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tlabelaid[ex[label_name]]\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tyield (d, label)\r\r\t\t\t\t\t\tdef gen_test():\r\t\t\t\t\t\t\t\t\t\t\t\tfor ex in transformed_ds[datasets.Split.TEST]:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{k: v for k, v in ex.items() if k in input_names}\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tlabelaid[ex[label_name]]\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tyield (d, label)\r\r\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t(\r\t\t\t\t\t\t tf.data.Dataset.from_generator(\r\t\t\t\t\t\t _lowerCamelCase ,({k: tf.intaa for k in input_names}, tf.intaa) ,({k: tf.TensorShape([None] ) for k in input_names}, tf.TensorShape([] )) ,)\r\t\t\t\t\t\t if datasets.Split.TRAIN in transformed_ds\r\t\t\t\t\t\t else None\r\t\t\t\t\t\t)\r\r\t\t\t\t\t\tif train_ds is not None:\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttrain_ds.apply(tf.data.experimental.assert_cardinality(len(ds[datasets.Split.TRAIN] ) ) )\r\r\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t(\r\t\t\t\t\t\t tf.data.Dataset.from_generator(\r\t\t\t\t\t\t _lowerCamelCase ,({k: tf.intaa for k in input_names}, tf.intaa) ,({k: tf.TensorShape([None] ) for k in input_names}, 
tf.TensorShape([] )) ,)\r\t\t\t\t\t\t if datasets.Split.VALIDATION in transformed_ds\r\t\t\t\t\t\t else None\r\t\t\t\t\t\t)\r\r\t\t\t\t\t\tif val_ds is not None:\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tval_ds.apply(tf.data.experimental.assert_cardinality(len(ds[datasets.Split.VALIDATION] ) ) )\r\r\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t(\r\t\t\t\t\t\t tf.data.Dataset.from_generator(\r\t\t\t\t\t\t _lowerCamelCase ,({k: tf.intaa for k in input_names}, tf.intaa) ,({k: tf.TensorShape([None] ) for k in input_names}, tf.TensorShape([] )) ,)\r\t\t\t\t\t\t if datasets.Split.TEST in transformed_ds\r\t\t\t\t\t\t else None\r\t\t\t\t\t\t)\r\r\t\t\t\t\t\tif test_ds is not None:\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttest_ds.apply(tf.data.experimental.assert_cardinality(len(ds[datasets.Split.TEST] ) ) )\r\r\t\t\t\t\t\treturn train_ds, val_ds, test_ds, labelaid\r\r\rlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] =logging.getLogger(__name__)\r\r\r\r\r\r\r@dataclass\rclass _a :\r\t\t\t_UpperCamelCase: List[Any] =\t\tfield(metadata={\"help\": \"Which column contains the label\"} )\r\t\t\t_UpperCamelCase: Union[str, Any] =\t\tfield(default=lowercase__ , metadata={\"help\": \"The path of the training file\"} )\r\t\t\t_UpperCamelCase: Union[str, Any] =\t\tfield(default=lowercase__ , metadata={\"help\": \"The path of the development file\"} )\r\t\t\t_UpperCamelCase: Optional[int] =\t\tfield(default=lowercase__ , metadata={\"help\": \"The path of the test file\"} )\r\t\t\t_UpperCamelCase: Tuple =\t\tfield(\r\t\t\t default=128 , metadata={\r\t\t\t \"help\": (\r\t\t\t \"The maximum total input sequence length after tokenization. 
Sequences longer \"\r\t\t\t \"than this will be truncated, sequences shorter will be padded.\"\r\t\t\t )\r\t\t\t } , )\r\t\t\t_UpperCamelCase: Union[str, Any] =\t\tfield(\r\t\t\t default=lowercase__ , metadata={\"help\": \"Overwrite the cached training and evaluation sets\"} )\r\r\r\r\r\r\r@dataclass\rclass _a :\r\t\t\t_UpperCamelCase: int =\t\tfield(\r\t\t\t metadata={\"help\": \"Path to pretrained model or model identifier from huggingface.co/models\"} )\r\t\t\t_UpperCamelCase: Optional[int] =\t\tfield(\r\t\t\t default=lowercase__ , metadata={\"help\": \"Pretrained config name or path if not the same as model_name\"} )\r\t\t\t_UpperCamelCase: str =\t\tfield(\r\t\t\t default=lowercase__ , metadata={\"help\": \"Pretrained tokenizer name or path if not the same as model_name\"} )\r\t\t\t_UpperCamelCase: List[Any] =\t\tfield(default=lowercase__ , metadata={\"help\": \"Set this flag to use fast tokenization.\"} )\r\t\t\t# If you want to tweak more attributes on your tokenizer, you should do it in a distinct script,\r\t\t\t# or just modify its tokenizer_config.json.\r\t\t\t_UpperCamelCase: Optional[Any] =\t\tfield(\r\t\t\t default=lowercase__ , metadata={\"help\": \"Where do you want to store the pretrained models downloaded from huggingface.co\"} , )\r\r\r\r\r\r\r\rdef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\r\r\t\t\t\t\t\t'''simple docstring'''\r\r\r\r\r\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tHfArgumentParser((ModelArguments, DataTrainingArguments, TFTrainingArguments) )\r\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tparser.parse_args_into_dataclasses()\r\r\t\t\t\t\t\tif (\r\t\t\t\t\t\t os.path.exists(training_args.output_dir )\r\t\t\t\t\t\t and os.listdir(training_args.output_dir )\r\t\t\t\t\t\t and training_args.do_train\r\t\t\t\t\t\t and not training_args.overwrite_output_dir\r\t\t\t\t\t\t):\r\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\t\t\t\t\t\t\t\t\t\t\t\t F\"\"\"Output directory ({training_args.output_dir}) already exists and is not 
empty. Use\"\"\"\r\t\t\t\t\t\t\t\t\t\t\t\t \"\"\" --overwrite_output_dir to overcome.\"\"\" )\r\r\t\t\t\t\t\t# Setup logging\r\t\t\t\t\t\tlogging.basicConfig(\r\t\t\t\t\t\t format=\"\"\"%(asctime)s - %(levelname)s - %(name)s - %(message)s\"\"\" ,datefmt=\"\"\"%m/%d/%Y %H:%M:%S\"\"\" ,level=logging.INFO ,)\r\t\t\t\t\t\tlogger.info(\r\t\t\t\t\t\t F\"\"\"n_replicas: {training_args.n_replicas}, distributed training: {bool(training_args.n_replicas > 1 )}, \"\"\"\r\t\t\t\t\t\t F\"\"\"16-bits training: {training_args.fpaa}\"\"\" )\r\t\t\t\t\t\tlogger.info(F\"\"\"Training/evaluation parameters {training_args}\"\"\" )\r\r\t\t\t\t\t\t# Load pretrained model and tokenizer\r\t\t\t\t\t\t#\r\t\t\t\t\t\t# Distributed training:\r\t\t\t\t\t\t# The .from_pretrained methods guarantee that only one local process can concurrently\r\t\t\t\t\t\t# download model & vocab.\r\r\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tAutoTokenizer.from_pretrained(\r\t\t\t\t\t\t model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path ,cache_dir=model_args.cache_dir ,)\r\r\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tget_tfds(\r\t\t\t\t\t\t train_file=data_args.train_file ,eval_file=data_args.dev_file ,test_file=data_args.test_file ,tokenizer=_lowerCamelCase ,label_column_id=data_args.label_column_id ,max_seq_length=data_args.max_seq_length ,)\r\r\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tAutoConfig.from_pretrained(\r\t\t\t\t\t\t model_args.config_name if model_args.config_name else model_args.model_name_or_path ,num_labels=len(_lowerCamelCase ) ,labelaid=_lowerCamelCase ,idalabel={id: label for label, id in labelaid.items()} ,finetuning_task=\"\"\"text-classification\"\"\" ,cache_dir=model_args.cache_dir ,)\r\r\t\t\t\t\t\twith training_args.strategy.scope():\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tTFAutoModelForSequenceClassification.from_pretrained(\r\t\t\t\t\t\t\t\t\t\t\t\t model_args.model_name_or_path 
,from_pt=bool(\"\"\".bin\"\"\" in model_args.model_name_or_path ) ,config=_lowerCamelCase ,cache_dir=model_args.cache_dir ,)\r\r\t\t\t\t\t\tdef compute_metrics(SCREAMING_SNAKE_CASE__ ) -> Dict:\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnp.argmax(p.predictions ,axis=1 )\r\r\t\t\t\t\t\t\t\t\t\t\t\treturn {\"acc\": (preds == p.label_ids).mean()}\r\r\t\t\t\t\t\t# Initialize our Trainer\r\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tTFTrainer(\r\t\t\t\t\t\t model=_lowerCamelCase ,args=_lowerCamelCase ,train_dataset=_lowerCamelCase ,eval_dataset=_lowerCamelCase ,compute_metrics=_lowerCamelCase ,)\r\r\t\t\t\t\t\t# Training\r\t\t\t\t\t\tif training_args.do_train:\r\t\t\t\t\t\t\t\t\t\t\t\ttrainer.train()\r\t\t\t\t\t\t\t\t\t\t\t\ttrainer.save_model()\r\t\t\t\t\t\t\t\t\t\t\t\ttokenizer.save_pretrained(training_args.output_dir )\r\r\t\t\t\t\t\t# Evaluation\r\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t{}\r\t\t\t\t\t\tif training_args.do_eval:\r\t\t\t\t\t\t\t\t\t\t\t\tlogger.info(\"\"\"*** Evaluate ***\"\"\" )\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttrainer.evaluate()\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tos.path.join(training_args.output_dir ,\"\"\"eval_results.txt\"\"\" )\r\r\t\t\t\t\t\t\t\t\t\t\t\twith open(_lowerCamelCase ,\"\"\"w\"\"\" ) as writer:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlogger.info(\"\"\"***** Eval results *****\"\"\" )\r\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor key, value in result.items():\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlogger.info(F\"\"\" {key} = {value}\"\"\" )\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twriter.write(F\"\"\"{key} = {value}\\n\"\"\" )\r\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tresults.update(_lowerCamelCase )\r\r\t\t\t\t\t\treturn results\r\r\rif __name__ == \"__main__\":\r\t\tmain()\r\r"},"code_codestyle":{"kind":"number","value":704,"string":"704"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import 
TYPE_CHECKING\r\n\r\nfrom ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint ={\r\n 'configuration_poolformer': [\r\n 'POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP',\r\n 'PoolFormerConfig',\r\n 'PoolFormerOnnxConfig',\r\n ]\r\n}\r\n\r\ntry:\r\n\t\tif not is_vision_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] =['PoolFormerFeatureExtractor']\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str] =['PoolFormerImageProcessor']\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =[\r\n\t\t 'POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'PoolFormerForImageClassification',\r\n\t\t 'PoolFormerModel',\r\n\t\t 'PoolFormerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_poolformer import (\r\n\t\t POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,\r\n\t\t PoolFormerConfig,\r\n\t\t PoolFormerOnnxConfig,\r\n\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_vision_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .feature_extraction_poolformer import PoolFormerFeatureExtractor\r\n\t\t\t\tfrom .image_processing_poolformer import PoolFormerImageProcessor\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_poolformer import (\r\n\t\t\t\t POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t PoolFormerForImageClassification,\r\n\t\t\t\t PoolFormerModel,\r\n\t\t\t\t 
PoolFormerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =_LazyModule(__name__, globals()['__file__'], _import_structure)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305154,"cells":{"code":{"kind":"string","value":"import json\r\nimport os\r\nimport tempfile\r\n\r\nfrom transformers.testing_utils import check_json_file_has_correct_format\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\t\t\t_UpperCamelCase: str =\t\tNone\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.feature_extraction_class(**self.feat_extract_dict )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tjson.loads(feat_extract.to_json_string() )\r\n\t\t\t\t\t\t\t\t\tfor key, value in self.feat_extract_dict.items():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(obj[key]\t\t\t\t\t\t, _a )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.feature_extraction_class(**self.feat_extract_dict )\r\n\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmpdirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tos.path.join(_a\t\t\t\t\t\t, \"\"\"feat_extract.json\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfeat_extract_first.to_json_file(_a )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.feature_extraction_class.from_json_file(_a )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(feat_extract_second.to_dict()\t\t\t\t\t\t, feat_extract_first.to_dict() )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.feature_extraction_class(**self.feat_extract_dict 
)\r\n\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmpdirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tfeat_extract_first.save_pretrained(_a )[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcheck_json_file_has_correct_format(_a )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tself.feature_extraction_class.from_pretrained(_a )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(feat_extract_second.to_dict()\t\t\t\t\t\t, feat_extract_first.to_dict() )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.feature_extraction_class()\r\n\t\t\t\t\t\t\t\t\tself.assertIsNotNone(_a )\r\n\r\n"},"code_codestyle":{"kind":"number","value":705,"string":"705"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport os\r\nimport string\r\nimport sys\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =1 << 8\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={\r\n 'tab': ord('\\t'),\r\n 'newline': ord('\\r'),\r\n 'esc': 27,\r\n 'up': 65 + ARROW_KEY_FLAG,\r\n 'down': 66 + ARROW_KEY_FLAG,\r\n 'right': 67 + ARROW_KEY_FLAG,\r\n 'left': 68 + ARROW_KEY_FLAG,\r\n 'mod_int': 91,\r\n 'undefined': sys.maxsize,\r\n 'interrupt': 3,\r\n 'insert': 50,\r\n 'delete': 51,\r\n 'pg_up': 53,\r\n 'pg_down': 54,\r\n}\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =KEYMAP['up']\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =KEYMAP['left']\r\n\r\nif sys.platform == \"win32\":\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =[]\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint ={\r\n\t\t b'\\xe0H': KEYMAP['up'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00H': KEYMAP['up'] - ARROW_KEY_FLAG,\r\n\t\t b'\\xe0P': KEYMAP['down'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00P': KEYMAP['down'] - ARROW_KEY_FLAG,\r\n\t\t b'\\xe0M': KEYMAP['right'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00M': KEYMAP['right'] - ARROW_KEY_FLAG,\r\n\t\t b'\\xe0K': 
KEYMAP['left'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00K': KEYMAP['left'] - ARROW_KEY_FLAG,\r\n\t\t}\r\n\r\nfor i in range(10):\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =ord(str(i))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif os.name == \"nt\":\r\n\t\t\t\t\t\t\t\t\t\t\t\timport msvcrt\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t\"\"\"mbcs\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Flush the keyboard buffer\r\n\t\t\t\t\t\t\t\t\t\t\t\twhile msvcrt.kbhit():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmsvcrt.getch()\r\n\t\t\t\t\t\t\t\t\t\t\t\tif len(SCREAMING_SNAKE_CASE__ ) == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Read the keystroke\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tmsvcrt.getch()\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# If it is a prefix char, get second part\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif ch in (b\"\\x00\", b\"\\xe0\"):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tch + msvcrt.getch()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Translate actual Win chars to bullet char types\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tchr(WIN_KEYMAP[cha] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tWIN_CH_BUFFER.append(chr(KEYMAP[\"\"\"mod_int\"\"\"] ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tWIN_CH_BUFFER.append(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) in (\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"insert\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"delete\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 
KEYMAP[\"pg_up\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"pg_down\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tWIN_CH_BUFFER.append(chr(1_2_6 ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tchr(KEYMAP[\"\"\"esc\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texcept KeyError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tcha[1]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tch.decode(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tWIN_CH_BUFFER.pop(0 )\r\n\t\t\t\t\t\telif os.name == \"posix\":\r\n\t\t\t\t\t\t\t\t\t\t\t\timport termios\r\n\t\t\t\t\t\t\t\t\t\t\t\timport tty\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tsys.stdin.fileno()\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttermios.tcgetattr(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttty.setraw(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tsys.stdin.read(1 )\r\n\t\t\t\t\t\t\t\t\t\t\t\tfinally:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttermios.tcsetattr(SCREAMING_SNAKE_CASE__ ,termios.TCSADRAIN ,SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\treturn ch\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tget_raw_chars()\r\n\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) in [KEYMAP[\"interrupt\"], KEYMAP[\"newline\"]]:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn 
char\r\n\r\n\t\t\t\t\t\telif ord(SCREAMING_SNAKE_CASE__ ) == KEYMAP[\"esc\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tget_raw_chars()\r\n\t\t\t\t\t\t\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) == KEYMAP[\"mod_int\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tget_raw_chars()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) >= KEYMAP[\"arrow_begin\"] - ARROW_KEY_FLAG and ord(SCREAMING_SNAKE_CASE__ ) <= KEYMAP[\"arrow_end\"] - ARROW_KEY_FLAG:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn chr(ord(SCREAMING_SNAKE_CASE__ ) + ARROW_KEY_FLAG )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn KEYMAP[\"undefined\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn get_raw_chars()\r\n\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif char in string.printable:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn char\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn KEYMAP[\"undefined\"]\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305155,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport json\r\nimport os\r\nimport re\r\nimport shutil\r\nimport tempfile\r\nimport unittest\r\nfrom typing import Tuple\r\n\r\nfrom transformers import AddedToken, BatchEncoding, ByTaTokenizer\r\nfrom transformers.utils import cached_property, is_tf_available, is_torch_available\r\n\r\nfrom ...test_tokenization_common import TokenizerTesterMixin\r\n\r\n\r\nif is_torch_available():\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] ='pt'\r\nelif is_tf_available():\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict ='tf'\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ='jax'\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\t__a , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: 
Tuple =\t\tByTaTokenizer\r\n\t\t\t_UpperCamelCase: Dict =\t\tFalse\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tsuper().setUp()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tByTaTokenizer()\r\n\t\t\t\t\t\t\t\t\ttokenizer.save_pretrained(self.tmpdirname )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@cached_property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn ByTaTokenizer.from_pretrained(\"\"\"google/byt5-small\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.tokenizer_class.from_pretrained(self.tmpdirname\t\t\t\t\t\t, **a_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=20\t\t\t\t\t\t, lowercase_=5 ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\tfor i in range(len(a_ ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttokenizer.decode([i]\t\t\t\t\t\t, clean_up_tokenization_spaces=a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texcept UnicodeDecodeError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpass\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttoks.append((i, tok) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tlist(filter(lambda lowercase_ : re.match(R\"\"\"^[ a-zA-Z]+$\"\"\"\t\t\t\t\t\t, t[1] )\t\t\t\t\t\t, a_ ) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tlist(filter(lambda lowercase_ : [t[0]] == tokenizer.encode(t[1]\t\t\t\t\t\t, add_special_tokens=a_ )\t\t\t\t\t\t, a_ ) )\r\n\t\t\t\t\t\t\t\t\tif max_length is not None and len(a_ ) > max_length:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttoks[:max_length]\r\n\t\t\t\t\t\t\t\t\tif 
min_length is not None and len(a_ ) < min_length and len(a_ ) > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twhile len(a_ ) < min_length:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttoks + toks\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t # toks_str = [t[1] for t in toks]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t[t[0] for t in toks]\r\n\r\n\t\t\t\t\t\t\t\t\t# Ensure consistency\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttokenizer.decode(a_\t\t\t\t\t\t, clean_up_tokenization_spaces=a_ )\r\n\t\t\t\t\t\t\t\t\tif \" \" not in output_txt and len(a_ ) > 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t tokenizer.decode([toks_ids[0]]\t\t\t\t\t\t, clean_up_tokenization_spaces=a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t + \"\"\" \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t + tokenizer.decode(toks_ids[1:]\t\t\t\t\t\t, clean_up_tokenization_spaces=a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\tif with_prefix_space:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t\"\"\" \"\"\" + output_txt\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttokenizer.encode(a_\t\t\t\t\t\t, add_special_tokens=a_ )\r\n\t\t\t\t\t\t\t\t\treturn output_txt, output_ids\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.ta_base_tokenizer\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttokenizer([\"\"\"hi\"\"\", \"\"\"I went to the gym\"\"\", \"\"\"\"\"\"] )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\ttokenizer([\"\"\"hi\"\"\", \"\"\"I went to the gym\"\"\", \"\"\"\"\"\"] )\r\n\t\t\t\t\t\t\t\t\tself.assertListEqual(batch_with_eos_added[\"\"\"input_ids\"\"\"]\t\t\t\t\t\t, batch_without_eos_added[\"\"\"input_ids\"\"\"] )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef 
_snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.ta_base_tokenizer\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t\"\"\"Unicode €.\"\"\"\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttokenizer(a_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t[88, 113, 108, 102, 114, 103, 104, 35, 229, 133, 175, 49, 1]\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoded[\"\"\"input_ids\"\"\"]\t\t\t\t\t\t, a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t# decoding\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttokenizer.decode(a_ )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(a_\t\t\t\t\t\t, \"\"\"Unicode €.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttokenizer(\"\"\"e è é ê ë\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[104, 35, 198, 171, 35, 198, 172, 35, 198, 173, 35, 198, 174, 1]\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoded[\"\"\"input_ids\"\"\"]\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\t# decoding\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\ttokenizer.decode(a_ )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(a_\t\t\t\t\t\t, \"\"\"e è é ê ë\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t# encode/decode, but with `encode` instead of `__call__`\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(tokenizer.decode(tokenizer.encode(\"\"\"e è é ê ë\"\"\" ) )\t\t\t\t\t\t, \"\"\"e è é ê ë\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.ta_base_tokenizer\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[\"\"\"A long paragraph for summarization.\"\"\", \"\"\"Another paragraph for summarization.\"\"\"]\r\n\t\t\t\t\t\t\t\t\t# fmt: off\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[68, 35, 111, 114, 113, 106, 35, 115, 100, 117, 100, 106, 117, 100, 115, 107, 35, 105, 114, 117, 35, 118, 120, 112, 112, 
100, 117, 108, 125, 100, 119, 108, 114, 113, 49, 1, 0]\r\n\t\t\t\t\t\t\t\t\t# fmt: on\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\ttokenizer(a_\t\t\t\t\t\t, padding=a_\t\t\t\t\t\t, return_tensors=a_ )\r\n\t\t\t\t\t\t\t\t\tself.assertIsInstance(a_\t\t\t\t\t\t, a_ )\r\n\r\n\t\t\t\t\t\t\t\t\tif FRAMEWORK != \"jax\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tlist(batch.input_ids.numpy()[0] )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tlist(batch.input_ids.tolist()[0] )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertListEqual(a_\t\t\t\t\t\t, a_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual((2, 37)\t\t\t\t\t\t, batch.input_ids.shape )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual((2, 37)\t\t\t\t\t\t, batch.attention_mask.shape )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.ta_base_tokenizer\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[\"\"\"A long paragraph for summarization.\"\"\", \"\"\"Another paragraph for summarization.\"\"\"]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttokenizer(a_\t\t\t\t\t\t, padding=a_\t\t\t\t\t\t, return_tensors=a_ )\r\n\t\t\t\t\t\t\t\t\t# check if input_ids are returned and no decoder_input_ids\r\n\t\t\t\t\t\t\t\t\tself.assertIn(\"\"\"input_ids\"\"\"\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\tself.assertIn(\"\"\"attention_mask\"\"\"\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\tself.assertNotIn(\"\"\"decoder_input_ids\"\"\"\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\tself.assertNotIn(\"\"\"decoder_attention_mask\"\"\"\t\t\t\t\t\t, a_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.ta_base_tokenizer\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[\r\n\t\t\t\t\t\t\t\t\t \"\"\"Summary of 
the text.\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"Another summary.\"\"\",\r\n\t\t\t\t\t\t\t\t\t]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttokenizer(\r\n\t\t\t\t\t\t\t\t\t text_target=a_\t\t\t\t\t\t, max_length=32\t\t\t\t\t\t, padding=\"\"\"max_length\"\"\"\t\t\t\t\t\t, truncation=a_\t\t\t\t\t\t, return_tensors=a_ )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(32\t\t\t\t\t\t, targets[\"\"\"input_ids\"\"\"].shape[1] )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.ta_base_tokenizer\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[\"\"\"A long paragraph for summarization. \"\"\"]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[\"\"\"Summary of the text. \"\"\"]\r\n\t\t\t\t\t\t\t\t\t# fmt: off\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[68, 35, 111, 114, 113, 106, 35, 115, 100, 117, 100, 106, 117, 100, 115, 107, 35, 105, 114, 117, 35, 118, 120, 112, 112, 100, 117, 108, 125, 100, 119, 108, 114, 113, 49, 35, 1]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[86, 120, 112, 112, 100, 117, 124, 35, 114, 105, 35, 119, 107, 104, 35, 119, 104, 123, 119, 49, 35, 1]\r\n\t\t\t\t\t\t\t\t\t# fmt: on\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttokenizer(a_\t\t\t\t\t\t, text_target=a_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(a_\t\t\t\t\t\t, batch[\"\"\"input_ids\"\"\"][0] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(a_\t\t\t\t\t\t, batch[\"\"\"labels\"\"\"][0] )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.get_tokenizers()\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in tokenizers:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith self.subTest(f\"\"\"{tokenizer.__class__.__name__}\"\"\" 
):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertNotEqual(tokenizer.model_max_length\t\t\t\t\t\t, 42 )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t # Now let's start the test\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tself.get_tokenizers()\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in tokenizers:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith self.subTest(f\"\"\"{tokenizer.__class__.__name__}\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Isolate this from the other tests because we save additional tokens/etc\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttempfile.mkdtemp()\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t\"\"\" He is very happy, UNwant\\u00E9d,running\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttokenizer.encode(a_\t\t\t\t\t\t, add_special_tokens=a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer.save_pretrained(a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttokenizer.__class__.from_pretrained(a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tafter_tokenizer.encode(a_\t\t\t\t\t\t, add_special_tokens=a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertListEqual(a_\t\t\t\t\t\t, a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tshutil.rmtree(a_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tself.get_tokenizers(model_max_length=42 )\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in tokenizers:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith self.subTest(f\"\"\"{tokenizer.__class__.__name__}\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Isolate this from the other tests because we save additional tokens/etc\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttempfile.mkdtemp()\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : 
List[Any]\t\t\t\t =\t\t\t\t\"\"\" He is very happy, UNwant\\u00E9d,running\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer.add_tokens([\"\"\"bim\"\"\", \"\"\"bambam\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttokenizer.additional_special_tokens\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tadditional_special_tokens.append(\"\"\"new_additional_special_token\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer.add_special_tokens({\"\"\"additional_special_tokens\"\"\": additional_special_tokens} )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttokenizer.encode(a_\t\t\t\t\t\t, add_special_tokens=a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer.save_pretrained(a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttokenizer.__class__.from_pretrained(a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tafter_tokenizer.encode(a_\t\t\t\t\t\t, add_special_tokens=a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertListEqual(a_\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIn(\"\"\"new_additional_special_token\"\"\"\t\t\t\t\t\t, after_tokenizer.additional_special_tokens )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(after_tokenizer.model_max_length\t\t\t\t\t\t, 42 )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttokenizer.__class__.from_pretrained(a_\t\t\t\t\t\t, model_max_length=43 )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(tokenizer.model_max_length\t\t\t\t\t\t, 43 )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tshutil.rmtree(a_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\tif 
self.test_slow_tokenizer:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer_list.append((self.tokenizer_class, self.get_tokenizer()) )\r\n\r\n\t\t\t\t\t\t\t\t\tif self.test_rust_tokenizer:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer_list.append((self.rust_tokenizer_class, self.get_rust_tokenizer()) )\r\n\r\n\t\t\t\t\t\t\t\t\tfor tokenizer_class, tokenizer_utils in tokenizer_list:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmp_dir:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer_utils.save_pretrained(a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(a_\t\t\t\t\t\t, \"\"\"special_tokens_map.json\"\"\" )\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as json_file:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tjson.load(a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(a_\t\t\t\t\t\t, \"\"\"tokenizer_config.json\"\"\" )\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as json_file:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tjson.load(a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[f\"\"\"\"\"\" for i in range(125 )]\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tadded_tokens_extra_ids + [\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"an_additional_special_token\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tadded_tokens_extra_ids + [\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"an_additional_special_token\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t]\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(a_\t\t\t\t\t\t, \"\"\"special_tokens_map.json\"\"\" )\t\t\t\t\t\t, \"\"\"w\"\"\"\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as 
outfile:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tjson.dump(a_\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(a_\t\t\t\t\t\t, \"\"\"tokenizer_config.json\"\"\" )\t\t\t\t\t\t, \"\"\"w\"\"\"\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as outfile:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tjson.dump(a_\t\t\t\t\t\t, a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# the following checks allow us to verify that our test works as expected, i.e. that the tokenizer takes\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# into account the new value of additional_special_tokens given in the \"tokenizer_config.json\" and\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# \"special_tokens_map.json\" files\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttokenizer_class.from_pretrained(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t a_\t\t\t\t\t\t, )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIn(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"an_additional_special_token\"\"\"\t\t\t\t\t\t, tokenizer_without_change_in_init.additional_special_tokens )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# self.assertIn(\"an_additional_special_token\",tokenizer_without_change_in_init.get_vocab()) # ByT5Tokenization no vocab\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [\"\"\"an_additional_special_token\"\"\"]\t\t\t\t\t\t, tokenizer_without_change_in_init.convert_ids_to_tokens(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t tokenizer_without_change_in_init.convert_tokens_to_ids([\"\"\"an_additional_special_token\"\"\"] ) )\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Now we test that we can change the value of additional_special_tokens in the from_pretrained\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tadded_tokens_extra_ids + 
[AddedToken(\"\"\"a_new_additional_special_token\"\"\"\t\t\t\t\t\t, lstrip=a_ )]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttokenizer_class.from_pretrained(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t a_\t\t\t\t\t\t, additional_special_tokens=a_\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIn(\"\"\"a_new_additional_special_token\"\"\"\t\t\t\t\t\t, tokenizer.additional_special_tokens )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [\"\"\"a_new_additional_special_token\"\"\"]\t\t\t\t\t\t, tokenizer.convert_ids_to_tokens(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t tokenizer.convert_tokens_to_ids([\"\"\"a_new_additional_special_token\"\"\"] ) )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\tif self.test_slow_tokenizer:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer_list.append((self.tokenizer_class, self.get_tokenizer()) )\r\n\r\n\t\t\t\t\t\t\t\t\tif self.test_rust_tokenizer:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer_list.append((self.rust_tokenizer_class, self.get_rust_tokenizer()) )\r\n\r\n\t\t\t\t\t\t\t\t\tfor tokenizer_class, tokenizer_utils in tokenizer_list:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmp_dir:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttokenizer_utils.save_pretrained(a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttokenizer_class.from_pretrained(a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue(tokenizer.decode([255] ) == \"\"\"\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.get_tokenizers(fast=a_\t\t\t\t\t\t, do_lower_case=a_ )\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in tokenizers:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith self.subTest(f\"\"\"{tokenizer.__class__.__name__}\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[\"\"\"t\"\"\", \"\"\"h\"\"\", \"\"\"i\"\"\", \"\"\"s\"\"\", \"\"\" \"\"\", \"\"\"i\"\"\", \"\"\"s\"\"\", \"\"\" \"\"\", \"\"\"a\"\"\", \"\"\" \"\"\", \"\"\"t\"\"\", \"\"\"e\"\"\", \"\"\"x\"\"\", \"\"\"t\"\"\", \"\"\"\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttokenizer.convert_tokens_to_string(a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(a_\t\t\t\t\t\t, a_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tself.get_tokenizers()\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in tokenizers:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith self.subTest(f\"\"\"{tokenizer.__class__.__name__}\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"bos_token\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"eos_token\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"unk_token\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"sep_token\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"pad_token\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t 
\"\"\"cls_token\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"mask_token\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t]\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttokenizer.convert_ids_to_tokens(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t a_\t\t\t\t\t\t, skip_special_tokens=a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor attr in attributes_list:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tsetattr(a_\t\t\t\t\t\t, attr + \"\"\"_id\"\"\"\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(getattr(a_\t\t\t\t\t\t, a_ )\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(getattr(a_\t\t\t\t\t\t, attr + \"\"\"_id\"\"\" )\t\t\t\t\t\t, a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tsetattr(a_\t\t\t\t\t\t, attr + \"\"\"_id\"\"\"\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(getattr(a_\t\t\t\t\t\t, a_ )\t\t\t\t\t\t, a_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(getattr(a_\t\t\t\t\t\t, attr + \"\"\"_id\"\"\" )\t\t\t\t\t\t, a_ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tsetattr(a_\t\t\t\t\t\t, \"\"\"additional_special_tokens_ids\"\"\"\t\t\t\t\t\t, [] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertListEqual(getattr(a_\t\t\t\t\t\t, \"\"\"additional_special_tokens\"\"\" )\t\t\t\t\t\t, [] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertListEqual(getattr(a_\t\t\t\t\t\t, \"\"\"additional_special_tokens_ids\"\"\" )\t\t\t\t\t\t, [] )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tsetattr(a_\t\t\t\t\t\t, \"\"\"additional_special_tokens_ids\"\"\"\t\t\t\t\t\t, [token_id_to_test_setters] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertListEqual(getattr(a_\t\t\t\t\t\t, 
\"\"\"additional_special_tokens\"\"\" )\t\t\t\t\t\t, [token_to_test_setters] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertListEqual(getattr(a_\t\t\t\t\t\t, \"\"\"additional_special_tokens_ids\"\"\" )\t\t\t\t\t\t, [token_id_to_test_setters] )\r\n\r\n"},"code_codestyle":{"kind":"number","value":706,"string":"706"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\n# Imports\r\nimport numpy as np\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tself.set_matricies(red=lowercase_\t\t\t\t\t\t, green=lowercase_\t\t\t\t\t\t, blue=lowercase_\t\t\t\t\t\t, red_edge=lowercase_\t\t\t\t\t\t, nir=lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tif red is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tred\r\n\t\t\t\t\t\t\t\t\tif green is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tgreen\r\n\t\t\t\t\t\t\t\t\tif blue is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tblue\r\n\t\t\t\t\t\t\t\t\tif red_edge is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tred_edge\r\n\t\t\t\t\t\t\t\t\tif nir is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tnir\r\n\t\t\t\t\t\t\t\t\treturn True\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=\"\"\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, 
lowercase_=None ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tself.set_matricies(red=lowercase_\t\t\t\t\t\t, green=lowercase_\t\t\t\t\t\t, blue=lowercase_\t\t\t\t\t\t, red_edge=lowercase_\t\t\t\t\t\t, nir=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t \"\"\"ARVI2\"\"\": self.arvaa,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CCCI\"\"\": self.ccci,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CVI\"\"\": self.cvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GLI\"\"\": self.gli,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NDVI\"\"\": self.ndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"BNDVI\"\"\": self.bndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"redEdgeNDVI\"\"\": self.red_edge_ndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GNDVI\"\"\": self.gndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GBNDVI\"\"\": self.gbndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GRNDVI\"\"\": self.grndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"RBNDVI\"\"\": self.rbndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"PNDVI\"\"\": self.pndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"ATSAVI\"\"\": self.atsavi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"BWDRVI\"\"\": self.bwdrvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CIgreen\"\"\": self.ci_green,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CIrededge\"\"\": self.ci_rededge,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CI\"\"\": self.ci,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CTVI\"\"\": self.ctvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GDVI\"\"\": self.gdvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"EVI\"\"\": self.evi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GEMI\"\"\": self.gemi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GOSAVI\"\"\": self.gosavi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GSAVI\"\"\": self.gsavi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"Hue\"\"\": self.hue,\r\n\t\t\t\t\t\t\t\t\t \"\"\"IVI\"\"\": self.ivi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"IPVI\"\"\": self.ipvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"I\"\"\": self.i,\r\n\t\t\t\t\t\t\t\t\t \"\"\"RVI\"\"\": self.rvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"MRVI\"\"\": self.mrvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"MSAVI\"\"\": self.m_savi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NormG\"\"\": self.norm_g,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NormNIR\"\"\": self.norm_nir,\r\n\t\t\t\t\t\t\t\t\t 
\"\"\"NormR\"\"\": self.norm_r,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NGRDI\"\"\": self.ngrdi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"RI\"\"\": self.ri,\r\n\t\t\t\t\t\t\t\t\t \"\"\"S\"\"\": self.s,\r\n\t\t\t\t\t\t\t\t\t \"\"\"IF\"\"\": self._if,\r\n\t\t\t\t\t\t\t\t\t \"\"\"DVI\"\"\": self.dvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"TVI\"\"\": self.tvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NDRE\"\"\": self.ndre,\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn funcs[index]()\r\n\t\t\t\t\t\t\t\t\texcept KeyError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(\"\"\"Index not in the list!\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn False\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn -0.1_8 + (1.1_7 * ((self.nir - self.red) / (self.nir + self.red)))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn ((self.nir - self.redEdge) / (self.nir + self.redEdge)) / (\r\n\t\t\t\t\t\t\t\t\t (self.nir - self.red) / (self.nir + self.red)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir * (self.red / (self.green**2))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (2 * self.green - self.red - self.blue) / (\r\n\t\t\t\t\t\t\t\t\t 2 * self.green + self.red + self.blue\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.red) / (self.nir + self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.blue) / (self.nir + self.blue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (self.redEdge - self.red) / (self.redEdge + 
self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.green) / (self.nir + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.green + self.blue)) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + (self.green + self.blue)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.green + self.red)) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + (self.green + self.red)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.blue + self.red)) / (self.nir + (self.blue + self.red))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.green + self.red + self.blue)) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + (self.green + self.red + self.blue)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0.0_8\t\t\t\t\t\t, lowercase_=1.2_2\t\t\t\t\t\t, lowercase_=0.0_3 ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn a * (\r\n\t\t\t\t\t\t\t\t\t (self.nir - a * self.red - b)\r\n\t\t\t\t\t\t\t\t\t / (a * self.nir + self.red - a * b + x * (1 + a**2))\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (0.1 * self.nir - self.blue) / (0.1 * self.nir + self.blue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir / self.green) - 1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir / self.redEdge) - 1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn (self.red - self.blue) / self.red\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.ndvi()\r\n\t\t\t\t\t\t\t\t\treturn ((ndvi + 0.5) / (abs(ndvi + 0.5 ))) * (abs(ndvi + 0.5 ) ** (1 / 2))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir - self.green\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn 2.5 * (\r\n\t\t\t\t\t\t\t\t\t (self.nir - self.red) / (self.nir + 6 * self.red - 7.5 * self.blue + 1)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t(2 * (self.nir**2 - self.red**2) + 1.5 * self.nir + 0.5 * self.red) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + self.red + 0.5\r\n\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\treturn n * (1 - 0.2_5 * n) - (self.red - 0.1_2_5) / (1 - self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0.1_6 ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.green) / (self.nir + self.green + y)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0.5 ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn ((self.nir - self.green) / (self.nir + self.green + n)) * (1 + n)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn np.arctan(\r\n\t\t\t\t\t\t\t\t\t ((2 * self.red - self.green - self.blue) / 3_0.5) * (self.green - self.blue) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - b) / (a * self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef 
_snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir / ((self.nir + self.red) / 2)) * (self.ndvi() + 1)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn (self.red + self.green + self.blue) / 3_0.5\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir / self.red\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (self.rvi() - 1) / (self.rvi() + 1)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn (\r\n\t\t\t\t\t\t\t\t\t (2 * self.nir + 1)\r\n\t\t\t\t\t\t\t\t\t - ((2 * self.nir + 1) ** 2 - 8 * (self.nir - self.red)) ** (1 / 2)\r\n\t\t\t\t\t\t\t\t\t) / 2\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.green / (self.nir + self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self.nir / (self.nir + self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn self.red / (self.nir + self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.green - self.red) / (self.green + self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\treturn (self.red - self.green) / (self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnp.max([np.max(self.red ), np.max(self.green ), np.max(self.blue )] )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnp.min([np.min(self.red ), 
np.min(self.green ), np.min(self.blue )] )\r\n\t\t\t\t\t\t\t\t\treturn (max_value - min_value) / max_value\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (2 * self.red - self.green - self.blue) / (self.green - self.blue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir / self.red\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (self.ndvi() + 0.5) ** (1 / 2)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.redEdge) / (self.nir + self.redEdge)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305156,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import Dict, List, Optional, Union\r\n\r\nimport numpy as np\r\n\r\nfrom ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict\r\nfrom ...image_transforms import (\r\n center_crop,\r\n convert_to_rgb,\r\n get_resize_output_image_size,\r\n normalize,\r\n rescale,\r\n resize,\r\n to_channel_dimension_format,\r\n)\r\nfrom ...image_utils import (\r\n OPENAI_CLIP_MEAN,\r\n OPENAI_CLIP_STD,\r\n ChannelDimension,\r\n ImageInput,\r\n PILImageResampling,\r\n make_list_of_images,\r\n to_numpy_array,\r\n valid_images,\r\n)\r\nfrom ...utils import TensorType, is_vision_available, logging\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] =logging.get_logger(__name__)\r\n\r\n\r\nif is_vision_available():\r\n\t\timport PIL\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: List[Any] =\t\t[\"pixel_values\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = 
PILImageResampling.BICUBIC\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, lowercase_ = 1 / 255\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(**__lowerCAmelCase )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tsize if size is not None else {\"\"\"shortest_edge\"\"\": 224}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tget_size_dict(__lowerCAmelCase\t\t\t\t\t\t, default_to_square=__lowerCAmelCase )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tcrop_size if crop_size is not None else {\"\"\"height\"\"\": 224, \"\"\"width\"\"\": 224}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tget_size_dict(__lowerCAmelCase\t\t\t\t\t\t, default_to_square=__lowerCAmelCase\t\t\t\t\t\t, param_name=\"\"\"crop_size\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tdo_resize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tsize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tresample\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tdo_center_crop\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tcrop_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tdo_rescale\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\trescale_factor\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdo_normalize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\timage_mean if image_mean is not None else OPENAI_CLIP_MEAN\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\timage_std if image_std is not None else OPENAI_CLIP_STD\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdo_convert_rgb\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case 
(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = PILImageResampling.BICUBIC\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tget_size_dict(__lowerCAmelCase\t\t\t\t\t\t, default_to_square=__lowerCAmelCase )\r\n\t\t\t\t\t\t\t\t\tif \"shortest_edge\" not in size:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(f\"\"\"The `size` parameter must contain the key `shortest_edge`. Got {size.keys()}\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tget_resize_output_image_size(__lowerCAmelCase\t\t\t\t\t\t, size=size[\"\"\"shortest_edge\"\"\"]\t\t\t\t\t\t, default_to_square=__lowerCAmelCase )\r\n\t\t\t\t\t\t\t\t\treturn resize(__lowerCAmelCase\t\t\t\t\t\t, size=__lowerCAmelCase\t\t\t\t\t\t, resample=__lowerCAmelCase\t\t\t\t\t\t, data_format=__lowerCAmelCase\t\t\t\t\t\t, **__lowerCAmelCase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tget_size_dict(__lowerCAmelCase )\r\n\t\t\t\t\t\t\t\t\tif \"height\" not in size or \"width\" not in size:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(f\"\"\"The `size` parameter must contain the keys (height, width). 
Got {size.keys()}\"\"\" )\r\n\t\t\t\t\t\t\t\t\treturn center_crop(__lowerCAmelCase\t\t\t\t\t\t, size=(size[\"\"\"height\"\"\"], size[\"\"\"width\"\"\"])\t\t\t\t\t\t, data_format=__lowerCAmelCase\t\t\t\t\t\t, **__lowerCAmelCase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn rescale(__lowerCAmelCase\t\t\t\t\t\t, scale=__lowerCAmelCase\t\t\t\t\t\t, data_format=__lowerCAmelCase\t\t\t\t\t\t, **__lowerCAmelCase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\treturn normalize(__lowerCAmelCase\t\t\t\t\t\t, mean=__lowerCAmelCase\t\t\t\t\t\t, std=__lowerCAmelCase\t\t\t\t\t\t, data_format=__lowerCAmelCase\t\t\t\t\t\t, **__lowerCAmelCase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = ChannelDimension.FIRST\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tdo_resize if do_resize is not None else self.do_resize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tsize if size is not None else self.size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tget_size_dict(__lowerCAmelCase\t\t\t\t\t\t, param_name=\"\"\"size\"\"\"\t\t\t\t\t\t, 
default_to_square=__lowerCAmelCase )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tresample if resample is not None else self.resample\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tdo_center_crop if do_center_crop is not None else self.do_center_crop\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tcrop_size if crop_size is not None else self.crop_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tget_size_dict(__lowerCAmelCase\t\t\t\t\t\t, param_name=\"\"\"crop_size\"\"\"\t\t\t\t\t\t, default_to_square=__lowerCAmelCase )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tdo_rescale if do_rescale is not None else self.do_rescale\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\trescale_factor if rescale_factor is not None else self.rescale_factor\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tdo_normalize if do_normalize is not None else self.do_normalize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\timage_mean if image_mean is not None else self.image_mean\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\timage_std if image_std is not None else self.image_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdo_convert_rgb if do_convert_rgb is not None else self.do_convert_rgb\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmake_list_of_images(__lowerCAmelCase )\r\n\r\n\t\t\t\t\t\t\t\t\tif not valid_images(__lowerCAmelCase ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"Invalid image type. 
Must be of type PIL.Image.Image, numpy.ndarray, \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"torch.Tensor, tf.Tensor or jax.ndarray.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tif do_resize and size is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"Size must be specified if do_resize is True.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tif do_center_crop and crop_size is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"Crop size must be specified if do_center_crop is True.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tif do_rescale and rescale_factor is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"Rescale factor must be specified if do_rescale is True.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tif do_normalize and (image_mean is None or image_std is None):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"Image mean and std must be specified if do_normalize is True.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t# PIL RGBA images are converted to RGB\r\n\t\t\t\t\t\t\t\t\tif do_convert_rgb:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[convert_to_rgb(__lowerCAmelCase ) for image in images]\r\n\r\n\t\t\t\t\t\t\t\t\t# All transformations expect numpy arrays.\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t[to_numpy_array(__lowerCAmelCase ) for image in images]\r\n\r\n\t\t\t\t\t\t\t\t\tif do_resize:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[self.resize(image=__lowerCAmelCase\t\t\t\t\t\t, size=__lowerCAmelCase\t\t\t\t\t\t, resample=__lowerCAmelCase ) for image in images]\r\n\r\n\t\t\t\t\t\t\t\t\tif do_center_crop:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[self.center_crop(image=__lowerCAmelCase\t\t\t\t\t\t, size=__lowerCAmelCase ) for image in images]\r\n\r\n\t\t\t\t\t\t\t\t\tif do_rescale:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[self.rescale(image=__lowerCAmelCase\t\t\t\t\t\t, scale=__lowerCAmelCase ) for image in 
images]\r\n\r\n\t\t\t\t\t\t\t\t\tif do_normalize:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[self.normalize(image=__lowerCAmelCase\t\t\t\t\t\t, mean=__lowerCAmelCase\t\t\t\t\t\t, std=__lowerCAmelCase ) for image in images]\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t[to_channel_dimension_format(__lowerCAmelCase\t\t\t\t\t\t, __lowerCAmelCase ) for image in images]\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t{\"\"\"pixel_values\"\"\": images}\r\n\t\t\t\t\t\t\t\t\treturn BatchFeature(data=__lowerCAmelCase\t\t\t\t\t\t, tensor_type=__lowerCAmelCase )\r\n\r\n\r\n\r\n\r\n"},"code_codestyle":{"kind":"number","value":707,"string":"707"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport argparse\r\nimport json\r\nimport math\r\nimport os\r\nimport time\r\nimport traceback\r\nimport zipfile\r\nfrom collections import Counter\r\n\r\nimport requests\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tif token is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t{\"\"\"Accept\"\"\": \"\"\"application/vnd.github+json\"\"\", \"\"\"Authorization\"\"\": F\"\"\"Bearer {token}\"\"\"}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tF\"\"\"https://api.github.com/repos/huggingface/transformers/actions/runs/{workflow_run_id}/jobs?per_page=100\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\trequests.get(SCREAMING_SNAKE_CASE__ ,headers=SCREAMING_SNAKE_CASE__ ).json()\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t{}\r\n\r\n\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\tjob_links.update({job[\"\"\"name\"\"\"]: job[\"\"\"html_url\"\"\"] for job in result[\"\"\"jobs\"\"\"]} )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : 
int\t\t\t\t =\t\t\t\tmath.ceil((result[\"\"\"total_count\"\"\"] - 1_0_0) / 1_0_0 )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor i in range(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\trequests.get(url + F\"\"\"&page={i + 2}\"\"\" ,headers=SCREAMING_SNAKE_CASE__ ).json()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tjob_links.update({job[\"\"\"name\"\"\"]: job[\"\"\"html_url\"\"\"] for job in result[\"\"\"jobs\"\"\"]} )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn job_links\r\n\t\t\t\t\t\texcept Exception:\r\n\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Unknown error, could not fetch links:\\n{traceback.format_exc()}\"\"\" )\r\n\r\n\t\t\t\t\t\treturn {}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tif token is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t{\"\"\"Accept\"\"\": \"\"\"application/vnd.github+json\"\"\", \"\"\"Authorization\"\"\": F\"\"\"Bearer {token}\"\"\"}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tF\"\"\"https://api.github.com/repos/huggingface/transformers/actions/runs/{worflow_run_id}/artifacts?per_page=100\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\trequests.get(SCREAMING_SNAKE_CASE__ ,headers=SCREAMING_SNAKE_CASE__ ).json()\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t{}\r\n\r\n\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\tartifacts.update({artifact[\"\"\"name\"\"\"]: artifact[\"\"\"archive_download_url\"\"\"] for artifact in result[\"\"\"artifacts\"\"\"]} )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tmath.ceil((result[\"\"\"total_count\"\"\"] - 1_0_0) / 1_0_0 )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor i in range(SCREAMING_SNAKE_CASE__ 
):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\trequests.get(url + F\"\"\"&page={i + 2}\"\"\" ,headers=SCREAMING_SNAKE_CASE__ ).json()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tartifacts.update({artifact[\"\"\"name\"\"\"]: artifact[\"\"\"archive_download_url\"\"\"] for artifact in result[\"\"\"artifacts\"\"\"]} )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn artifacts\r\n\t\t\t\t\t\texcept Exception:\r\n\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Unknown error, could not fetch links:\\n{traceback.format_exc()}\"\"\" )\r\n\r\n\t\t\t\t\t\treturn {}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tif token is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{\"\"\"Accept\"\"\": \"\"\"application/vnd.github+json\"\"\", \"\"\"Authorization\"\"\": F\"\"\"Bearer {token}\"\"\"}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\trequests.get(SCREAMING_SNAKE_CASE__ ,headers=SCREAMING_SNAKE_CASE__ ,allow_redirects=SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tresult.headers[\"\"\"Location\"\"\"]\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\trequests.get(SCREAMING_SNAKE_CASE__ ,allow_redirects=SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tos.path.join(SCREAMING_SNAKE_CASE__ ,F\"\"\"{artifact_name}.zip\"\"\" )\r\n\t\t\t\t\t\twith open(SCREAMING_SNAKE_CASE__ ,\"\"\"wb\"\"\" ) as fp:\r\n\t\t\t\t\t\t\t\t\t\t\t\tfp.write(response.content )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t 
=\t\t\t\t[]\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\r\n\t\t\t\t\t\twith zipfile.ZipFile(SCREAMING_SNAKE_CASE__ ) as z:\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor filename in z.namelist():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif not os.path.isdir(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# read the file\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif filename in [\"failures_line.txt\", \"summary_short.txt\", \"job_name.txt\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith z.open(SCREAMING_SNAKE_CASE__ ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor line in f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tline.decode(\"\"\"UTF-8\"\"\" ).strip()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif filename == \"failures_line.txt\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# `error_line` is the place where `error` occurs\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tline[: line.index(\"\"\": \"\"\" )]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tline[line.index(\"\"\": \"\"\" ) + len(\"\"\": \"\"\" ) :]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\terrors.append([error_line, error] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texcept 
Exception:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# skip un-related lines\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpass\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif filename == \"summary_short.txt\" and line.startswith(\"\"\"FAILED \"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# `test` is the test method that failed\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tline[len(\"\"\"FAILED \"\"\" ) :]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfailed_tests.append(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif filename == \"job_name.txt\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tline\r\n\r\n\t\t\t\t\t\tif len(SCREAMING_SNAKE_CASE__ ) != len(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t F\"\"\"`errors` and `failed_tests` should have the same number of elements. Got {len(SCREAMING_SNAKE_CASE__ )} for `errors` \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t F\"\"\"and {len(SCREAMING_SNAKE_CASE__ )} for `failed_tests` instead. 
The test reports in {artifact_zip_path} have some\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t \"\"\" problem.\"\"\" )\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tif job_name and job_links:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tjob_links.get(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\t# A list with elements of the form (line of error, error, failed test)\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[x + [y] + [job_link] for x, y in zip(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )]\r\n\r\n\t\t\t\t\t\treturn result\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t[]\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[os.path.join(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ) for p in os.listdir(SCREAMING_SNAKE_CASE__ ) if p.endswith(\"\"\".zip\"\"\" )]\r\n\t\t\t\t\t\tfor p in paths:\r\n\t\t\t\t\t\t\t\t\t\t\t\terrors.extend(get_errors_from_single_artifact(SCREAMING_SNAKE_CASE__ ,job_links=SCREAMING_SNAKE_CASE__ ) )\r\n\r\n\t\t\t\t\t\treturn errors\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tCounter()\r\n\t\t\t\t\t\tcounter.update([x[1] for x in logs] )\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tcounter.most_common()\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t{}\r\n\t\t\t\t\t\tfor error, count in counts:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif error_filter is None or error not in error_filter:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t{\"\"\"count\"\"\": count, \"\"\"failed_tests\"\"\": [(x[2], 
x[0]) for x in logs if x[1] == error]}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdict(sorted(r.items() ,key=lambda SCREAMING_SNAKE_CASE__ : item[1][\"count\"] ,reverse=SCREAMING_SNAKE_CASE__ ) )\r\n\t\t\t\t\t\treturn r\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttest.split(\"\"\"::\"\"\" )[0]\r\n\t\t\t\t\t\tif test.startswith(\"\"\"tests/models/\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttest.split(\"\"\"/\"\"\" )[2]\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tNone\r\n\r\n\t\t\t\t\t\treturn test\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[(x[0], x[1], get_model(x[2] )) for x in logs]\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[x for x in logs if x[2] is not None]\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{x[2] for x in logs}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t{}\r\n\t\t\t\t\t\tfor test in tests:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tCounter()\r\n\t\t\t\t\t\t\t\t\t\t\t\t# count by errors in `test`\r\n\t\t\t\t\t\t\t\t\t\t\t\tcounter.update([x[1] for x in logs if x[2] == test] )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tcounter.most_common()\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t{error: count for error, count in counts if (error_filter is None or error not in error_filter)}\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tsum(error_counts.values() )\r\n\t\t\t\t\t\t\t\t\t\t\t\tif n_errors > 
0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t{\"\"\"count\"\"\": n_errors, \"\"\"errors\"\"\": error_counts}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tdict(sorted(r.items() ,key=lambda SCREAMING_SNAKE_CASE__ : item[1][\"count\"] ,reverse=SCREAMING_SNAKE_CASE__ ) )\r\n\t\t\t\t\t\treturn r\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t\"\"\"| no. | error | status |\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t\"\"\"|-:|:-|:-|\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[header, sep]\r\n\t\t\t\t\t\tfor error in reduced_by_error:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\treduced_by_error[error][\"\"\"count\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tF\"\"\"| {count} | {error[:1_0_0]} | |\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\tlines.append(SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\treturn \"\\n\".join(SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t\"\"\"| model | no. 
of errors | major error | count |\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t\"\"\"|-:|-:|-:|-:|\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t[header, sep]\r\n\t\t\t\t\t\tfor model in reduced_by_model:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\treduced_by_model[model][\"\"\"count\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tlist(reduced_by_model[model][\"\"\"errors\"\"\"].items() )[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tF\"\"\"| {model} | {count} | {error[:6_0]} | {_count} |\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\tlines.append(SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\treturn \"\\n\".join(SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =argparse.ArgumentParser()\r\n\t\t# Required parameters\r\n\t\tparser.add_argument('--workflow_run_id', type=str, required=True, help='A GitHub Actions workflow run id.')\r\n\t\tparser.add_argument(\r\n\t\t '--output_dir',\r\n\t\t type=str,\r\n\t\t required=True,\r\n\t\t help='Where to store the downloaded artifacts and other result files.',\r\n\t\t)\r\n\t\tparser.add_argument('--token', default=None, type=str, help='A token that has actions:read permission.')\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =parser.parse_args()\r\n\r\n\t\tos.makedirs(args.output_dir, exist_ok=True)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =get_job_links(args.workflow_run_id, token=args.token)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={}\r\n\t\t# To deal with `workflow_call` event, where a job name is the combination of the job names in the caller and callee.\r\n\t\t# For example, `PyTorch 1.11 / Model tests (models/albert, single-gpu)`.\r\n\t\tif _job_links:\r\n\t\t\t\tfor k, v in _job_links.items():\r\n\t\t\t\t\t\t# This is how GitHub actions combine job names.\r\n\t\t\t\t\t\tif \" / \" in 
k:\r\n\t\t\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =k.find(' / ')\r\n\t\t\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =k[index + len(' / ') :]\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =v\r\n\t\twith open(os.path.join(args.output_dir, 'job_links.json'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tjson.dump(job_links, fp, ensure_ascii=False, indent=4)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =get_artifacts_links(args.workflow_run_id, token=args.token)\r\n\t\twith open(os.path.join(args.output_dir, 'artifacts.json'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tjson.dump(artifacts, fp, ensure_ascii=False, indent=4)\r\n\r\n\t\tfor idx, (name, url) in enumerate(artifacts.items()):\r\n\t\t\t\tdownload_artifact(name, url, args.output_dir, args.token)\r\n\t\t\t\t# Be gentle to GitHub\r\n\t\t\t\ttime.sleep(1)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] =get_all_errors(args.output_dir, job_links=job_links)\r\n\r\n\t\t# `e[1]` is the error\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =Counter()\r\n\t\tcounter.update([e[1] for e in errors])\r\n\r\n\t\t# print the top 30 most common test errors\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =counter.most_common(30)\r\n\t\tfor item in most_common:\r\n\t\t\t\tprint(item)\r\n\r\n\t\twith open(os.path.join(args.output_dir, 'errors.json'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tjson.dump(errors, fp, ensure_ascii=False, indent=4)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =reduce_by_error(errors)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =reduce_by_model(errors)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =make_github_table(reduced_by_error)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] =make_github_table_per_model(reduced_by_model)\r\n\r\n\t\twith open(os.path.join(args.output_dir, 'reduced_by_error.txt'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tfp.write(sa)\r\n\t\twith open(os.path.join(args.output_dir, 
'reduced_by_model.txt'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tfp.write(sa)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305157,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\n'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\r\nfrom diffusers.utils.testing_utils import require_onnxruntime\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_onnxruntime\r\nclass _a :\r\n\t\t\tpass\r\n\r\n"},"code_codestyle":{"kind":"number","value":708,"string":"708"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import TYPE_CHECKING\r\n\r\n# rely on isort to merge the imports\r\nfrom ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] ={\r\n 'configuration_autoformer': [\r\n 'AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP',\r\n 'AutoformerConfig',\r\n ],\r\n}\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =[\r\n\t\t 'AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'AutoformerForPrediction',\r\n\t\t 'AutoformerModel',\r\n\t\t 'AutoformerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_autoformer import (\r\n\t\t AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,\r\n\t\t AutoformerConfig,\r\n\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_autoformer import (\r\n\t\t\t\t AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t AutoformerForPrediction,\r\n\t\t\t\t AutoformerModel,\r\n\t\t\t\t AutoformerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =_LazyModule(__name__, 
globals()['__file__'], _import_structure, module_spec=__spec__)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305158,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\n'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\r\nimport fire\r\n\r\nfrom utils import calculate_rouge, save_json\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ,**SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t[x.strip() for x in open(_lowercase ).readlines()]\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t[x.strip() for x in open(_lowercase ).readlines()][: len(_lowercase )]\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tcalculate_rouge(_lowercase ,_lowercase ,**_lowercase )\r\n\t\t\t\t\t\tif save_path is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tsave_json(_lowercase ,_lowercase ,indent=_lowercase )\r\n\t\t\t\t\t\treturn metrics # these print nicely\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tfire.Fire(calculate_rouge_path)\r\n\r\n"},"code_codestyle":{"kind":"number","value":709,"string":"709"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport copy\r\nfrom collections import OrderedDict\r\nfrom typing import Dict, Mapping\r\n\r\nfrom packaging import version\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...onnx import OnnxConfig\r\nfrom ...utils import logging\r\nfrom ..auto import CONFIG_MAPPING\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] ={\r\n 'facebook/detr-resnet-50': 'https://huggingface.co/facebook/detr-resnet-50/resolve/main/config.json',\r\n # See all DETR models at https://huggingface.co/models?filter=detr\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a 
(\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: List[str] =\t\t\"detr\"\r\n\t\t\t_UpperCamelCase: Dict =\t\t[\"past_key_values\"]\r\n\t\t\t_UpperCamelCase: Optional[int] =\t\t{\r\n\t\t\t \"hidden_size\": \"d_model\",\r\n\t\t\t \"num_attention_heads\": \"encoder_attention_heads\",\r\n\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=100\t\t\t\t\t\t, lowercase_=6\t\t\t\t\t\t, lowercase_=2048\t\t\t\t\t\t, lowercase_=8\t\t\t\t\t\t, lowercase_=6\t\t\t\t\t\t, lowercase_=2048\t\t\t\t\t\t, lowercase_=8\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=\"relu\"\t\t\t\t\t\t, lowercase_=256\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1.0\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=\"sine\"\t\t\t\t\t\t, lowercase_=\"resnet50\"\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=5\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=5\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tif backbone_config is not None and use_timm_backbone:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"You can't specify both `backbone_config` and `use_timm_backbone`.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tif not use_timm_backbone:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif backbone_config is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlogger.info(\"\"\"`backbone_config` is `None`. 
Initializing the config with the default `ResNet` backbone.\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tCONFIG_MAPPING[\"\"\"resnet\"\"\"](out_features=[\"\"\"stage4\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tbackbone_config.get(\"\"\"model_type\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tCONFIG_MAPPING[backbone_model_type]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tconfig_class.from_dict(lowercase_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# set timm attributes to None\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase , lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tNone, None, None\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tuse_timm_backbone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tbackbone_config\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tnum_channels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tnum_queries\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\td_model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tencoder_ffn_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tencoder_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tencoder_attention_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdecoder_ffn_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdecoder_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tdecoder_attention_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tdropout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tattention_dropout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t 
=\t\t\t\tactivation_dropout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tactivation_function\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tinit_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tinit_xavier_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tencoder_layerdrop\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdecoder_layerdrop\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tencoder_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tauxiliary_loss\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tposition_embedding_type\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tbackbone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tuse_pretrained_backbone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdilation\r\n\t\t\t\t\t\t\t\t\t# Hungarian matcher\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tclass_cost\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tbbox_cost\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tgiou_cost\r\n\t\t\t\t\t\t\t\t\t# Loss coefficients\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tmask_loss_coefficient\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdice_loss_coefficient\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tbbox_loss_coefficient\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tgiou_loss_coefficient\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\teos_coefficient\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(is_encoder_decoder=lowercase_\t\t\t\t\t\t, **lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self.encoder_attention_heads\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self.d_model\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@classmethod\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tcls\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn cls(backbone_config=lowercase_\t\t\t\t\t\t, **lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict[str, any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tcopy.deepcopy(self.__dict__ )\r\n\t\t\t\t\t\t\t\t\tif output[\"backbone_config\"] is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.backbone_config.to_dict()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.__class__.model_type\r\n\t\t\t\t\t\t\t\t\treturn output\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: Any =\t\tversion.parse(\"1.11\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tMapping[str, Mapping[int, str]]:\r\n\t\t\t\t\t\t\t\t\treturn OrderedDict(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t (\"\"\"pixel_values\"\"\", {0: \"\"\"batch\"\"\", 1: \"\"\"num_channels\"\"\", 2: \"\"\"height\"\"\", 3: \"\"\"width\"\"\"}),\r\n\t\t\t\t\t\t\t\t\t (\"\"\"pixel_mask\"\"\", {0: \"\"\"batch\"\"\"}),\r\n\t\t\t\t\t\t\t\t\t ] )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tfloat:\r\n\t\t\t\t\t\t\t\t\treturn 1e-5\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn 12\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305159,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr ={\r\n 0: '''0''',\r\n 1: '''1''',\r\n 2: '''2''',\r\n 3: '''3''',\r\n 4: '''4''',\r\n 5: '''5''',\r\n 6: '''6''',\r\n 7: 
'''7''',\r\n 8: '''8''',\r\n 9: '''9''',\r\n 10: '''a''',\r\n 11: '''b''',\r\n 12: '''c''',\r\n 13: '''d''',\r\n 14: '''e''',\r\n 15: '''f''',\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tassert type(UpperCAmelCase__ ) in (int, float) and decimal == int(UpperCAmelCase__ )\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tint(UpperCAmelCase__ )\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t\"\"\"\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tFalse\r\n\t\t\t\t\t\tif decimal < 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tTrue\r\n\t\t\t\t\t\t\t\t\t\t\t\tdecimal *= -1\r\n\t\t\t\t\t\twhile decimal > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : str\t\t\t\t =\t\t\t\tdivmod(UpperCAmelCase__ ,1_6 )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tvalues[remainder] + hexadecimal\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t\"\"\"0x\"\"\" + hexadecimal\r\n\t\t\t\t\t\tif negative:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t\"\"\"-\"\"\" + hexadecimal\r\n\t\t\t\t\t\treturn hexadecimal\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\r\n"},"code_codestyle":{"kind":"number","value":710,"string":"710"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport json\r\nimport logging\r\nimport os\r\nimport sys\r\nfrom pathlib import Path\r\n\r\nimport finetune_rag\r\n\r\nfrom transformers.file_utils import is_apex_available\r\nfrom transformers.testing_utils import (\r\n TestCasePlus,\r\n execute_subprocess_async,\r\n require_ray,\r\n require_torch_gpu,\r\n require_torch_multi_gpu,\r\n)\r\n\r\n\r\nlogging.basicConfig(level=logging.DEBUG)\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =logging.getLogger()\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr 
=logging.StreamHandler(sys.stdout)\r\nlogger.addHandler(stream_handler)\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tos.makedirs(lowercase_\t\t\t\t\t\t, exist_ok=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{\"\"\"source\"\"\": \"\"\"What is love ?\"\"\", \"\"\"target\"\"\": \"\"\"life\"\"\"}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{\"\"\"train\"\"\": 12, \"\"\"val\"\"\": 2, \"\"\"test\"\"\": 2}\r\n\t\t\t\t\t\t\t\t\tfor split in [\"train\", \"test\", \"val\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor field in [\"source\", \"target\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t\"\"\"\\n\"\"\".join([contents[field]] * n_lines[split] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(lowercase_\t\t\t\t\t\t, f\"\"\"{split}.{field}\"\"\" )\t\t\t\t\t\t, \"\"\"w\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tf.write(lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = \"pytorch\" ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.get_auto_remove_tmp_dir()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tos.path.join(lowercase_\t\t\t\t\t\t, \"\"\"output\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tos.path.join(lowercase_\t\t\t\t\t\t, \"\"\"data\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself._create_dummy_data(data_dir=lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tf\"\"\"\n --data_dir {data_dir} \\\n --output_dir {output_dir} \\\n --model_name_or_path facebook/rag-sequence-base \\\n --model_type rag_sequence \\\n --do_train \\\n --do_predict \\\n --n_val -1 \\\n --val_check_interval 1.0 \\\n --train_batch_size 2 \\\n 
--eval_batch_size 1 \\\n --max_source_length 25 \\\n --max_target_length 25 \\\n --val_max_target_length 25 \\\n --test_max_target_length 25 \\\n --label_smoothing 0.1 \\\n --dropout 0.1 \\\n --attention_dropout 0.1 \\\n --weight_decay 0.001 \\\n --adam_epsilon 1e-08 \\\n --max_grad_norm 0.1 \\\n --lr_scheduler polynomial \\\n --learning_rate 3e-04 \\\n --num_train_epochs 1 \\\n --warmup_steps 4 \\\n --gradient_accumulation_steps 1 \\\n --distributed-port 8787 \\\n --use_dummy_dataset 1 \\\n --distributed_retriever {distributed_retriever} \\\n \"\"\".split()\r\n\r\n\t\t\t\t\t\t\t\t\tif gpus > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(f\"\"\"--gpus={gpus}\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif is_apex_available():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(\"\"\"--fp16\"\"\" )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(\"\"\"--gpus=0\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(\"\"\"--distributed_backend=ddp_cpu\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(\"\"\"--num_processes=2\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[sys.executable, str(Path(finetune_rag.__file__ ).resolve() )] + testargs\r\n\t\t\t\t\t\t\t\t\texecute_subprocess_async(lowercase_\t\t\t\t\t\t, env=self.get_env() )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tos.path.join(lowercase_\t\t\t\t\t\t, \"\"\"metrics.json\"\"\" )\r\n\t\t\t\t\t\t\t\t\twith open(lowercase_ ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tjson.load(lowercase_ )\r\n\t\t\t\t\t\t\t\t\treturn result\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch_gpu\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tself._run_finetune(gpus=1 )\r\n\t\t\t\t\t\t\t\t\tself.assertGreaterEqual(result[\"\"\"test\"\"\"][0][\"\"\"test_avg_em\"\"\"]\t\t\t\t\t\t, 0.2 
)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch_multi_gpu\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself._run_finetune(gpus=2 )\r\n\t\t\t\t\t\t\t\t\tself.assertGreaterEqual(result[\"\"\"test\"\"\"][0][\"\"\"test_avg_em\"\"\"]\t\t\t\t\t\t, 0.2 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch_gpu\r\n\t\t\t@require_ray\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself._run_finetune(gpus=1\t\t\t\t\t\t, distributed_retriever=\"\"\"ray\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertGreaterEqual(result[\"\"\"test\"\"\"][0][\"\"\"test_avg_em\"\"\"]\t\t\t\t\t\t, 0.2 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch_multi_gpu\r\n\t\t\t@require_ray\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself._run_finetune(gpus=1\t\t\t\t\t\t, distributed_retriever=\"\"\"ray\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertGreaterEqual(result[\"\"\"test\"\"\"][0][\"\"\"test_avg_em\"\"\"]\t\t\t\t\t\t, 0.2 )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305160,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ = 1_0_0 ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tset()\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t0\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tn + 1 # maximum limit\r\n\r\n\t\t\t\t\t\tfor a in range(2 ,_snake_case ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor b in range(2 ,_snake_case ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ta**b # calculates the current power\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcollect_powers.add(_snake_case ) # adds 
the result to the set\r\n\t\t\t\t\t\treturn len(_snake_case )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tprint('Number of terms ', solution(int(str(input()).strip())))\r\n\r\n"},"code_codestyle":{"kind":"number","value":711,"string":"711"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...utils import logging\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] ={\r\n 'transfo-xl-wt103': 'https://huggingface.co/transfo-xl-wt103/resolve/main/config.json',\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: Tuple =\t\t\"transfo-xl\"\r\n\t\t\t_UpperCamelCase: str =\t\t[\"mems\"]\r\n\t\t\t_UpperCamelCase: Dict =\t\t{\r\n\t\t\t \"n_token\": \"vocab_size\",\r\n\t\t\t \"hidden_size\": \"d_model\",\r\n\t\t\t \"num_attention_heads\": \"n_head\",\r\n\t\t\t \"num_hidden_layers\": \"n_layer\",\r\n\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=267735\t\t\t\t\t\t, lowercase_=[20000, 40000, 200000]\t\t\t\t\t\t, lowercase_=1024\t\t\t\t\t\t, lowercase_=1024\t\t\t\t\t\t, lowercase_=16\t\t\t\t\t\t, lowercase_=64\t\t\t\t\t\t, lowercase_=4096\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=18\t\t\t\t\t\t, lowercase_=1600\t\t\t\t\t\t, lowercase_=1000\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, lowercase_=-1\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=\"normal\"\t\t\t\t\t\t, lowercase_=0.0_1\t\t\t\t\t\t, lowercase_=0.0_1\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-5\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tvocab_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\tself.cutoffs.extend(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tif proj_share_all_but_first:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[False] + [True] * len(self.cutoffs )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t[False] + [False] * len(self.cutoffs )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\td_model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\td_embed\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\td_head\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\td_inner\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdiv_val\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tpre_lnorm\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tn_layer\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tn_head\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tmem_len\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tsame_length\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tattn_type\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tclamp_len\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tsample_softmax\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tadaptive\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tdropout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tdropatt\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tuntie_r\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tinit\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tinit_range\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tproj_init_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t 
=\t\t\t\tinit_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tlayer_norm_epsilon\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(eos_token_id=lowercase_\t\t\t\t\t\t, **lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\t# Message copied from Transformer-XL documentation\r\n\t\t\t\t\t\t\t\t\tlogger.info(f\"\"\"The model {self.model_type} is one of the few models that has no sequence length limit.\"\"\" )\r\n\t\t\t\t\t\t\t\t\treturn -1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@max_position_embeddings.setter\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\t# Message copied from Transformer-XL documentation\r\n\t\t\t\t\t\t\t\t\traise NotImplementedError(\r\n\t\t\t\t\t\t\t\t\t f\"\"\"The model {self.model_type} is one of the few models that has no sequence length limit.\"\"\" )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305161,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport unittest\r\n\r\nimport numpy as np\r\n\r\nfrom transformers.testing_utils import require_flax, require_tf, require_torch\r\nfrom transformers.utils import (\r\n expand_dims,\r\n flatten_dict,\r\n is_flax_available,\r\n is_tf_available,\r\n is_torch_available,\r\n reshape,\r\n squeeze,\r\n transpose,\r\n)\r\n\r\n\r\nif is_flax_available():\r\n\t\timport jax.numpy as jnp\r\n\r\nif is_tf_available():\r\n\t\timport tensorflow as tf\r\n\r\nif is_torch_available():\r\n\t\timport torch\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tunittest.TestCase ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params''': {\r\n\t\t\t\t\t\t\t\t\t '''summarization''': {'''length_penalty''': 1.0, '''max_length''': 128, 
'''min_length''': 12, '''num_beams''': 4},\r\n\t\t\t\t\t\t\t\t\t '''summarization_cnn''': {'''length_penalty''': 2.0, '''max_length''': 142, '''min_length''': 56, '''num_beams''': 4},\r\n\t\t\t\t\t\t\t\t\t '''summarization_xsum''': {'''length_penalty''': 1.0, '''max_length''': 62, '''min_length''': 11, '''num_beams''': 6},\r\n\t\t\t\t\t\t\t\t\t }\r\n\t\t\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization.length_penalty''': 1.0,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization.max_length''': 128,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization.min_length''': 12,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization.num_beams''': 4,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization_cnn.length_penalty''': 2.0,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization_cnn.max_length''': 142,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization_cnn.min_length''': 56,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization_cnn.num_beams''': 4,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization_xsum.length_penalty''': 1.0,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization_xsum.max_length''': 62,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization_xsum.min_length''': 11,\r\n\t\t\t\t\t\t\t\t\t '''task_specific_params.summarization_xsum.num_beams''': 6,\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(flatten_dict(UpperCamelCase__ )\t\t\t\t\t\t, UpperCamelCase__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(transpose(UpperCamelCase__ )\t\t\t\t\t\t, x.transpose() ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4\t\t\t\t\t\t, 5 
)\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(transpose(UpperCamelCase__\t\t\t\t\t\t, axes=(1, 2, 0) )\t\t\t\t\t\t, x.transpose((1, 2, 0) ) ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(transpose(UpperCamelCase__ )\t\t\t\t\t\t, transpose(UpperCamelCase__ ).numpy() ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttorch.tensor(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(transpose(UpperCamelCase__\t\t\t\t\t\t, axes=(1, 2, 0) )\t\t\t\t\t\t, transpose(UpperCamelCase__\t\t\t\t\t\t, axes=(1, 2, 0) ).numpy() ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_tf\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttf.constant(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(transpose(UpperCamelCase__ )\t\t\t\t\t\t, transpose(UpperCamelCase__ ).numpy() ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttf.constant(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(transpose(UpperCamelCase__\t\t\t\t\t\t, axes=(1, 2, 0) )\t\t\t\t\t\t, transpose(UpperCamelCase__\t\t\t\t\t\t, axes=(1, 2, 0) ).numpy() ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_flax\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tjnp.array(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(transpose(UpperCamelCase__ )\t\t\t\t\t\t, np.asarray(transpose(UpperCamelCase__ ) ) ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tjnp.array(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(transpose(UpperCamelCase__\t\t\t\t\t\t, axes=(1, 2, 0) )\t\t\t\t\t\t, np.asarray(transpose(UpperCamelCase__\t\t\t\t\t\t, axes=(1, 2, 0) ) ) ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(reshape(UpperCamelCase__\t\t\t\t\t\t, (4, 3) )\t\t\t\t\t\t, np.reshape(UpperCamelCase__\t\t\t\t\t\t, (4, 3) ) ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(reshape(UpperCamelCase__\t\t\t\t\t\t, (12, 5) )\t\t\t\t\t\t, np.reshape(UpperCamelCase__\t\t\t\t\t\t, (12, 5) ) ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.tensor(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(reshape(UpperCamelCase__\t\t\t\t\t\t, (4, 3) )\t\t\t\t\t\t, reshape(UpperCamelCase__\t\t\t\t\t\t, (4, 3) ).numpy() ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttorch.tensor(UpperCamelCase__ 
)\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(reshape(UpperCamelCase__\t\t\t\t\t\t, (12, 5) )\t\t\t\t\t\t, reshape(UpperCamelCase__\t\t\t\t\t\t, (12, 5) ).numpy() ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_tf\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttf.constant(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(reshape(UpperCamelCase__\t\t\t\t\t\t, (4, 3) )\t\t\t\t\t\t, reshape(UpperCamelCase__\t\t\t\t\t\t, (4, 3) ).numpy() ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttf.constant(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(reshape(UpperCamelCase__\t\t\t\t\t\t, (12, 5) )\t\t\t\t\t\t, reshape(UpperCamelCase__\t\t\t\t\t\t, (12, 5) ).numpy() ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_flax\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tjnp.array(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(reshape(UpperCamelCase__\t\t\t\t\t\t, (4, 3) )\t\t\t\t\t\t, np.asarray(reshape(UpperCamelCase__\t\t\t\t\t\t, (4, 3) ) ) ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tjnp.array(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(reshape(UpperCamelCase__\t\t\t\t\t\t, (12, 5) )\t\t\t\t\t\t, np.asarray(reshape(UpperCamelCase__\t\t\t\t\t\t, (12, 5) ) ) ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnp.random.randn(1\t\t\t\t\t\t, 3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(squeeze(UpperCamelCase__ )\t\t\t\t\t\t, np.squeeze(UpperCamelCase__ ) ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnp.random.randn(1\t\t\t\t\t\t, 4\t\t\t\t\t\t, 1\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(squeeze(UpperCamelCase__\t\t\t\t\t\t, axis=2 )\t\t\t\t\t\t, np.squeeze(UpperCamelCase__\t\t\t\t\t\t, axis=2 ) ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tnp.random.randn(1\t\t\t\t\t\t, 3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttorch.tensor(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(squeeze(UpperCamelCase__ )\t\t\t\t\t\t, squeeze(UpperCamelCase__ ).numpy() ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnp.random.randn(1\t\t\t\t\t\t, 4\t\t\t\t\t\t, 1\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttorch.tensor(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(squeeze(UpperCamelCase__\t\t\t\t\t\t, axis=2 )\t\t\t\t\t\t, squeeze(UpperCamelCase__\t\t\t\t\t\t, axis=2 ).numpy() ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_tf\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnp.random.randn(1\t\t\t\t\t\t, 3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttf.constant(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(squeeze(UpperCamelCase__ )\t\t\t\t\t\t, squeeze(UpperCamelCase__ ).numpy() ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tnp.random.randn(1\t\t\t\t\t\t, 4\t\t\t\t\t\t, 1\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : 
Optional[Any]\t\t\t\t =\t\t\t\ttf.constant(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(squeeze(UpperCamelCase__\t\t\t\t\t\t, axis=2 )\t\t\t\t\t\t, squeeze(UpperCamelCase__\t\t\t\t\t\t, axis=2 ).numpy() ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_flax\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnp.random.randn(1\t\t\t\t\t\t, 3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tjnp.array(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(squeeze(UpperCamelCase__ )\t\t\t\t\t\t, np.asarray(squeeze(UpperCamelCase__ ) ) ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tnp.random.randn(1\t\t\t\t\t\t, 4\t\t\t\t\t\t, 1\t\t\t\t\t\t, 5 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tjnp.array(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(squeeze(UpperCamelCase__\t\t\t\t\t\t, axis=2 )\t\t\t\t\t\t, np.asarray(squeeze(UpperCamelCase__\t\t\t\t\t\t, axis=2 ) ) ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(expand_dims(UpperCamelCase__\t\t\t\t\t\t, axis=1 )\t\t\t\t\t\t, np.expand_dims(UpperCamelCase__\t\t\t\t\t\t, axis=1 ) ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttorch.tensor(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(expand_dims(UpperCamelCase__\t\t\t\t\t\t, axis=1 )\t\t\t\t\t\t, expand_dims(UpperCamelCase__\t\t\t\t\t\t, axis=1 ).numpy() ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_tf\r\n\t\t\tdef _snake_case 
(\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttf.constant(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(expand_dims(UpperCamelCase__\t\t\t\t\t\t, axis=1 )\t\t\t\t\t\t, expand_dims(UpperCamelCase__\t\t\t\t\t\t, axis=1 ).numpy() ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_flax\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tnp.random.randn(3\t\t\t\t\t\t, 4 )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tjnp.array(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(np.allclose(expand_dims(UpperCamelCase__\t\t\t\t\t\t, axis=1 )\t\t\t\t\t\t, np.asarray(expand_dims(UpperCamelCase__\t\t\t\t\t\t, axis=1 ) ) ) )\r\n\r\n"},"code_codestyle":{"kind":"number","value":712,"string":"712"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport torch\r\n\r\nfrom diffusers import DiffusionPipeline\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tsuper().__init__()\r\n\r\n\t\t\t\t\t\t\t\t\tself.register_modules(unet=lowercase_\t\t\t\t\t\t, scheduler=lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __call__(\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.randn(\r\n\t\t\t\t\t\t\t\t\t (1, self.unet.config.in_channels, self.unet.config.sample_size, self.unet.config.sample_size)\t\t\t\t\t\t, )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t1\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.unet(lowercase_\t\t\t\t\t\t, lowercase_ ).sample\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t 
=\t\t\t\tself.scheduler.step(lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ).prev_sample\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tscheduler_output - scheduler_output + torch.ones_like(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\treturn result\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305162,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport inspect\r\nimport unittest\r\n\r\nimport numpy as np\r\n\r\nfrom tests.test_modeling_common import floats_tensor\r\nfrom transformers import MaskaFormerConfig, is_torch_available, is_vision_available\r\nfrom transformers.testing_utils import require_torch, require_torch_multi_gpu, require_vision, slow, torch_device\r\nfrom transformers.utils import cached_property\r\n\r\nfrom ...test_configuration_common import ConfigTester\r\nfrom ...test_modeling_common import ModelTesterMixin\r\nfrom ...test_pipeline_mixin import PipelineTesterMixin\r\n\r\n\r\nif is_torch_available():\r\n\t\timport torch\r\n\r\n\t\tfrom transformers import MaskaFormerForUniversalSegmentation, MaskaFormerModel\r\n\r\n\t\tif is_vision_available():\r\n\t\t\t\tfrom transformers import MaskaFormerImageProcessor\r\n\r\nif is_vision_available():\r\n\t\tfrom PIL import Image\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=10\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=32 * 8\t\t\t\t\t\t, lowercase_=32 * 8\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=64\t\t\t\t\t\t, ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tparent\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tbatch_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t 
=\t\t\t\tis_training\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tuse_auxiliary_loss\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tnum_queries\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tnum_channels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tmin_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tmax_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tnum_labels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\thidden_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\thidden_dim\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tfloats_tensor([self.batch_size, self.num_channels, self.min_size, self.max_size] ).to(\r\n\t\t\t\t\t\t\t\t\t _SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttorch.ones([self.batch_size, self.min_size, self.max_size]\t\t\t\t\t\t, device=_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t(\r\n\t\t\t\t\t\t\t\t\t torch.rand([self.batch_size, self.num_labels, self.min_size, self.max_size]\t\t\t\t\t\t, device=_SCREAMING_SNAKE_CASE ) > 0.5\r\n\t\t\t\t\t\t\t\t\t).float()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t(torch.rand((self.batch_size, self.num_labels)\t\t\t\t\t\t, device=_SCREAMING_SNAKE_CASE ) > 0.5).long()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.get_config()\r\n\t\t\t\t\t\t\t\t\treturn config, pixel_values, pixel_mask, mask_labels, class_labels\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tMaskaFormerConfig(\r\n\t\t\t\t\t\t\t\t\t hidden_size=self.hidden_dim\t\t\t\t\t\t, )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tself.num_queries\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.num_labels\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[1, 1, 1, 1]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.num_channels\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t64\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t128\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.hidden_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.hidden_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.hidden_dim\r\n\t\t\t\t\t\t\t\t\treturn config\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.prepare_config_and_inputs()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t{\"\"\"pixel_values\"\"\": pixel_values, \"\"\"pixel_mask\"\"\": pixel_mask}\r\n\t\t\t\t\t\t\t\t\treturn config, inputs_dict\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\toutput.encoder_hidden_states\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\toutput.pixel_decoder_hidden_states\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\toutput.transformer_decoder_hidden_states\r\n\r\n\t\t\t\t\t\t\t\t\tself.parent.assertTrue(len(_SCREAMING_SNAKE_CASE )\t\t\t\t\t\t, len(config.backbone_config.depths ) )\r\n\t\t\t\t\t\t\t\t\tself.parent.assertTrue(len(_SCREAMING_SNAKE_CASE )\t\t\t\t\t\t, len(config.backbone_config.depths ) )\r\n\t\t\t\t\t\t\t\t\tself.parent.assertTrue(len(_SCREAMING_SNAKE_CASE )\t\t\t\t\t\t, config.decoder_layers )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, 
lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=False ) ->\t\t\t\tTuple:\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\t\twith torch.no_grad():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tMaskaFormerModel(config=_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmodel.to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmodel.eval()\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tmodel(pixel_values=_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, pixel_mask=_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tmodel(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, output_hidden_states=_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\tself.parent.assertEqual(\r\n\t\t\t\t\t\t\t\t\t output.transformer_decoder_last_hidden_state.shape\t\t\t\t\t\t, (self.batch_size, self.num_queries, self.hidden_dim)\t\t\t\t\t\t, )\r\n\t\t\t\t\t\t\t\t\t# let's ensure the other two hidden state exists\r\n\t\t\t\t\t\t\t\t\tself.parent.assertTrue(output.pixel_decoder_last_hidden_state is not None )\r\n\t\t\t\t\t\t\t\t\tself.parent.assertTrue(output.encoder_last_hidden_state is not None )\r\n\r\n\t\t\t\t\t\t\t\t\tif output_hidden_states:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.check_output_hidden_state(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, _SCREAMING_SNAKE_CASE )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tMaskaFormerForUniversalSegmentation(config=_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tmodel.to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tmodel.eval()\r\n\r\n\t\t\t\t\t\t\t\t\tdef comm_check_on_output(lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# let's still check that all the required stuff is 
there\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.parent.assertTrue(result.transformer_decoder_last_hidden_state is not None )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.parent.assertTrue(result.pixel_decoder_last_hidden_state is not None )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.parent.assertTrue(result.encoder_last_hidden_state is not None )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# okay, now we need to check the logits shape\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# due to the encoder compression, masks have a //4 spatial size\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.parent.assertEqual(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t result.masks_queries_logits.shape\t\t\t\t\t\t, (self.batch_size, self.num_queries, self.min_size // 4, self.max_size // 4)\t\t\t\t\t\t, )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# + 1 for null class\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.parent.assertEqual(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t result.class_queries_logits.shape\t\t\t\t\t\t, (self.batch_size, self.num_queries, self.num_labels + 1) )\r\n\r\n\t\t\t\t\t\t\t\t\twith torch.no_grad():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tmodel(pixel_values=_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, pixel_mask=_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tmodel(_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcomm_check_on_output(_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tmodel(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t pixel_values=_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, pixel_mask=_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, mask_labels=_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, class_labels=_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\tcomm_check_on_output(_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\tself.parent.assertTrue(result.loss is not None )\r\n\t\t\t\t\t\t\t\t\tself.parent.assertEqual(result.loss.shape\t\t\t\t\t\t, torch.Size([1] ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_torch\r\nclass _a 
(\t\t\tsnake_case_ , snake_case_ , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: str =\t\t(MaskaFormerModel, MaskaFormerForUniversalSegmentation) if is_torch_available() else ()\r\n\t\t\t_UpperCamelCase: Optional[Any] =\t\t{\"feature-extraction\": MaskaFormerModel} if is_torch_available() else {}\r\n\r\n\t\t\t_UpperCamelCase: Tuple =\t\tFalse\r\n\t\t\t_UpperCamelCase: List[str] =\t\tFalse\r\n\t\t\t_UpperCamelCase: Optional[int] =\t\tFalse\r\n\t\t\t_UpperCamelCase: Optional[Any] =\t\tFalse\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tMaskaFormerModelTester(self )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tConfigTester(self\t\t\t\t\t\t, config_class=_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, has_text_modality=_SCREAMING_SNAKE_CASE )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tself.config_tester.run_common_tests()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.model_tester.prepare_config_and_inputs_for_common()\r\n\t\t\t\t\t\t\t\t\tself.model_tester.create_and_check_maskaformer_model(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, **_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, output_hidden_states=_SCREAMING_SNAKE_CASE )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.model_tester.prepare_config_and_inputs()\r\n\t\t\t\t\t\t\t\t\tself.model_tester.create_and_check_maskaformer_instance_segmentation_head_model(*_SCREAMING_SNAKE_CASE )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skip(reason=\"\"\"Mask2Former does not use inputs_embeds\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skip(reason=\"\"\"Mask2Former does not have a get_input_embeddings method\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skip(reason=\"\"\"Mask2Former is not a generative model\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skip(reason=\"\"\"Mask2Former does not use token embeddings\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch_multi_gpu\r\n\t\t\t@unittest.skip(\r\n\t\t\t reason=\"\"\"Mask2Former has some layers using `add_module` which doesn't work well with `nn.DataParallel`\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skip(\"\"\"Will be fixed soon by reducing the size of the model used for common tests.\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.model_tester.prepare_config_and_inputs_for_common()\r\n\r\n\t\t\t\t\t\t\t\t\tfor model_class in self.all_model_classes:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmodel_class(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tinspect.signature(model.forward )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# signature.parameters is an OrderedDict => so arg_names order is deterministic\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[*signature.parameters.keys()]\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : 
Optional[int]\t\t\t\t =\t\t\t\t[\"\"\"pixel_values\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertListEqual(arg_names[:1]\t\t\t\t\t\t, _SCREAMING_SNAKE_CASE )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@slow\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\t\tfor model_name in [\"facebook/mask2former-swin-small-coco-instance\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tMaskaFormerModel.from_pretrained(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsNotNone(_SCREAMING_SNAKE_CASE )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t(self.model_tester.min_size,) * 2\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t \"\"\"pixel_values\"\"\": torch.randn((2, 3, *size)\t\t\t\t\t\t, device=_SCREAMING_SNAKE_CASE ),\r\n\t\t\t\t\t\t\t\t\t \"\"\"mask_labels\"\"\": torch.randn((2, 10, *size)\t\t\t\t\t\t, device=_SCREAMING_SNAKE_CASE ),\r\n\t\t\t\t\t\t\t\t\t \"\"\"class_labels\"\"\": torch.zeros(2\t\t\t\t\t\t, 10\t\t\t\t\t\t, device=_SCREAMING_SNAKE_CASE ).long(),\r\n\t\t\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.model_tester.get_config()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tMaskaFormerForUniversalSegmentation(_SCREAMING_SNAKE_CASE ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tmodel(**_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(outputs.loss is not None )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Any\t\t\t\t =\t\t\t\tself.model_tester.prepare_config_and_inputs_for_common()\r\n\t\t\t\t\t\t\t\t\tself.model_tester.create_and_check_maskaformer_model(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, 
**_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, output_hidden_states=_SCREAMING_SNAKE_CASE )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : int\t\t\t\t =\t\t\t\tself.model_tester.prepare_config_and_inputs_for_common()\r\n\r\n\t\t\t\t\t\t\t\t\tfor model_class in self.all_model_classes:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tmodel_class(_SCREAMING_SNAKE_CASE ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tmodel(**_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, output_attentions=_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue(outputs.attentions is not None )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\t\tif not self.model_tester.is_training:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.all_model_classes[1]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.model_tester.prepare_config_and_inputs()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tmodel_class(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tmodel.to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tmodel.train()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tmodel(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, mask_labels=_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, class_labels=_SCREAMING_SNAKE_CASE ).loss\r\n\t\t\t\t\t\t\t\t\tloss.backward()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tself.all_model_classes[1]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tself.model_tester.prepare_config_and_inputs()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tTrue\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tTrue\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tmodel_class(_SCREAMING_SNAKE_CASE ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tmodel.train()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tmodel(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, mask_labels=_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, class_labels=_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\toutputs.encoder_hidden_states[0]\r\n\t\t\t\t\t\t\t\t\tencoder_hidden_states.retain_grad()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\toutputs.pixel_decoder_hidden_states[0]\r\n\t\t\t\t\t\t\t\t\tpixel_decoder_hidden_states.retain_grad()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\toutputs.transformer_decoder_hidden_states[0]\r\n\t\t\t\t\t\t\t\t\ttransformer_decoder_hidden_states.retain_grad()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\toutputs.attentions[0]\r\n\t\t\t\t\t\t\t\t\tattentions.retain_grad()\r\n\r\n\t\t\t\t\t\t\t\t\toutputs.loss.backward(retain_graph=_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertIsNotNone(encoder_hidden_states.grad )\r\n\t\t\t\t\t\t\t\t\tself.assertIsNotNone(pixel_decoder_hidden_states.grad )\r\n\t\t\t\t\t\t\t\t\tself.assertIsNotNone(transformer_decoder_hidden_states.grad )\r\n\t\t\t\t\t\t\t\t\tself.assertIsNotNone(attentions.grad )\r\n\r\n\r\n\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =1e-4\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tImage.open(\"\"\"./tests/fixtures/tests_samples/COCO/000000039769.png\"\"\" )\r\n\t\t\t\t\t\treturn 
image\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_vision\r\n@slow\r\nclass _a (\t\t\tunittest.TestCase ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@cached_property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn \"facebook/mask2former-swin-small-coco-instance\"\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@cached_property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn MaskaFormerImageProcessor.from_pretrained(self.model_checkpoints ) if is_vision_available() else None\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tMaskaFormerModel.from_pretrained(self.model_checkpoints ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.default_image_processor\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tprepare_img()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\timage_processor(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tinputs[\"\"\"pixel_values\"\"\"].shape\r\n\t\t\t\t\t\t\t\t\t# check size is divisible by 32\r\n\t\t\t\t\t\t\t\t\tself.assertTrue((inputs_shape[-1] % 32) == 0 and (inputs_shape[-2] % 32) == 0 )\r\n\t\t\t\t\t\t\t\t\t# check size\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, (1, 3, 384, 384) )\r\n\r\n\t\t\t\t\t\t\t\t\twith torch.no_grad():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tmodel(**_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttorch.tensor(\r\n\t\t\t\t\t\t\t\t\t [[-0.2_7_9_0, -1.0_7_1_7, -1.1_6_6_8], [-0.5_1_2_8, -0.3_1_2_8, -0.4_9_8_7], [-0.5_8_3_2, 0.1_9_7_1, -0.0_1_9_7]] ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(\r\n\t\t\t\t\t\t\t\t\t 
torch.allclose(\r\n\t\t\t\t\t\t\t\t\t outputs.encoder_last_hidden_state[0, 0, :3, :3]\t\t\t\t\t\t, _SCREAMING_SNAKE_CASE\t\t\t\t\t\t, atol=_SCREAMING_SNAKE_CASE ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttorch.tensor(\r\n\t\t\t\t\t\t\t\t\t [[0.8_9_7_3, 1.1_8_4_7, 1.1_7_7_6], [1.1_9_3_4, 1.5_0_4_0, 1.5_1_2_8], [1.1_1_5_3, 1.4_4_8_6, 1.4_9_5_1]] ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(\r\n\t\t\t\t\t\t\t\t\t torch.allclose(\r\n\t\t\t\t\t\t\t\t\t outputs.pixel_decoder_last_hidden_state[0, 0, :3, :3]\t\t\t\t\t\t, _SCREAMING_SNAKE_CASE\t\t\t\t\t\t, atol=_SCREAMING_SNAKE_CASE ) )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttorch.tensor(\r\n\t\t\t\t\t\t\t\t\t [[2.1_1_5_2, 1.7_0_0_0, -0.8_6_0_3], [1.5_8_0_8, 1.8_0_0_4, -0.9_3_5_3], [1.6_0_4_3, 1.7_4_9_5, -0.5_9_9_9]] ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(\r\n\t\t\t\t\t\t\t\t\t torch.allclose(\r\n\t\t\t\t\t\t\t\t\t outputs.transformer_decoder_last_hidden_state[0, :3, :3]\t\t\t\t\t\t, _SCREAMING_SNAKE_CASE\t\t\t\t\t\t, atol=_SCREAMING_SNAKE_CASE ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tMaskaFormerForUniversalSegmentation.from_pretrained(self.model_checkpoints ).to(_SCREAMING_SNAKE_CASE ).eval()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.default_image_processor\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tprepare_img()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\timage_processor(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tinputs[\"\"\"pixel_values\"\"\"].shape\r\n\t\t\t\t\t\t\t\t\t# check size is divisible by 32\r\n\t\t\t\t\t\t\t\t\tself.assertTrue((inputs_shape[-1] % 32) == 0 and (inputs_shape[-2] % 32) == 0 
)\r\n\t\t\t\t\t\t\t\t\t# check size\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(_SCREAMING_SNAKE_CASE\t\t\t\t\t\t, (1, 3, 384, 384) )\r\n\r\n\t\t\t\t\t\t\t\t\twith torch.no_grad():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tmodel(**_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\t# masks_queries_logits\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\toutputs.masks_queries_logits\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t masks_queries_logits.shape\t\t\t\t\t\t, (1, model.config.num_queries, inputs_shape[-2] // 4, inputs_shape[-1] // 4) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[\r\n\t\t\t\t\t\t\t\t\t [-8.7_8_3_9, -9.0_0_5_6, -8.8_1_2_1],\r\n\t\t\t\t\t\t\t\t\t [-7.4_1_0_4, -7.0_3_1_3, -6.5_4_0_1],\r\n\t\t\t\t\t\t\t\t\t [-6.6_1_0_5, -6.3_4_2_7, -6.4_6_7_5],\r\n\t\t\t\t\t\t\t\t\t]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor(_SCREAMING_SNAKE_CASE ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(masks_queries_logits[0, 0, :3, :3]\t\t\t\t\t\t, _SCREAMING_SNAKE_CASE\t\t\t\t\t\t, atol=_SCREAMING_SNAKE_CASE ) )\r\n\t\t\t\t\t\t\t\t\t# class_queries_logits\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\toutputs.class_queries_logits\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(class_queries_logits.shape\t\t\t\t\t\t, (1, model.config.num_queries, model.config.num_labels + 1) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttorch.tensor(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t [1.8_3_2_4, -8.0_8_3_5, -4.1_9_2_2],\r\n\t\t\t\t\t\t\t\t\t [0.8_4_5_0, -9.0_0_5_0, -3.6_0_5_3],\r\n\t\t\t\t\t\t\t\t\t [0.3_0_4_5, -7.7_2_9_3, -3.0_2_7_5],\r\n\t\t\t\t\t\t\t\t\t ] ).to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(outputs.class_queries_logits[0, :3, :3]\t\t\t\t\t\t, _SCREAMING_SNAKE_CASE\t\t\t\t\t\t, atol=_SCREAMING_SNAKE_CASE ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tMaskaFormerForUniversalSegmentation.from_pretrained(self.model_checkpoints ).to(_SCREAMING_SNAKE_CASE ).eval()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.default_image_processor\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\timage_processor(\r\n\t\t\t\t\t\t\t\t\t [np.zeros((3, 800, 1333) ), np.zeros((3, 800, 1333) )]\t\t\t\t\t\t, segmentation_maps=[np.zeros((384, 384) ).astype(np.floataa ), np.zeros((384, 384) ).astype(np.floataa )]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\"\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tinputs[\"\"\"pixel_values\"\"\"].to(_SCREAMING_SNAKE_CASE )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[el.to(_SCREAMING_SNAKE_CASE ) for el in inputs[\"\"\"mask_labels\"\"\"]]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t[el.to(_SCREAMING_SNAKE_CASE ) for el in inputs[\"\"\"class_labels\"\"\"]]\r\n\r\n\t\t\t\t\t\t\t\t\twith torch.no_grad():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tmodel(**_SCREAMING_SNAKE_CASE )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(outputs.loss is not None )\r\n\r\n\r\n\r\n"},"code_codestyle":{"kind":"number","value":713,"string":"713"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport pytest\r\nimport requests\r\n\r\nfrom datasets.utils.file_utils import http_head\r\n\r\nfrom .utils import OfflineSimulationMode, RequestWouldHangIndefinitelyError, offline\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.integration\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith offline(OfflineSimulationMode.CONNECTION_TIMES_OUT ):\r\n\t\t\t\t\t\t\t\t\t\t\t\twith pytest.raises(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\trequests.request(\"\"\"GET\"\"\" ,\"\"\"https://huggingface.co\"\"\" 
)\r\n\t\t\t\t\t\t\t\t\t\t\t\twith pytest.raises(requests.exceptions.ConnectTimeout ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\trequests.request(\"\"\"GET\"\"\" ,\"\"\"https://huggingface.co\"\"\" ,timeout=1.0 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.integration\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith offline(OfflineSimulationMode.CONNECTION_FAILS ):\r\n\t\t\t\t\t\t\t\t\t\t\t\twith pytest.raises(requests.exceptions.ConnectionError ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\trequests.request(\"\"\"GET\"\"\" ,\"\"\"https://huggingface.co\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith offline(OfflineSimulationMode.HF_DATASETS_OFFLINE_SET_TO_1 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\twith pytest.raises(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\thttp_head(\"\"\"https://huggingface.co\"\"\" )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305163,"cells":{"code":{"kind":"string","value":"import os\r\n\r\nimport pytest\r\n\r\nfrom datasets import (\r\n get_dataset_config_info,\r\n get_dataset_config_names,\r\n get_dataset_infos,\r\n get_dataset_split_names,\r\n inspect_dataset,\r\n inspect_metric,\r\n)\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =pytest.mark.integration\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.parametrize(\"\"\"path\"\"\" ,[\"\"\"paws\"\"\", \"\"\"csv\"\"\"] )\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tinspect_dataset(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tpath + \"\"\".py\"\"\"\r\n\t\t\t\t\t\tassert script_name in os.listdir(SCREAMING_SNAKE_CASE__ 
)\r\n\t\t\t\t\t\tassert \"__pycache__\" not in os.listdir(SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.filterwarnings(\"\"\"ignore:inspect_metric is deprecated:FutureWarning\"\"\" )\r\n@pytest.mark.filterwarnings(\"\"\"ignore:metric_module_factory is deprecated:FutureWarning\"\"\" )\r\n@pytest.mark.parametrize(\"\"\"path\"\"\" ,[\"\"\"accuracy\"\"\"] )\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tinspect_metric(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tpath + \"\"\".py\"\"\"\r\n\t\t\t\t\t\tassert script_name in os.listdir(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tassert \"__pycache__\" not in os.listdir(SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.parametrize(\r\n \"\"\"path, config_name, expected_splits\"\"\" ,[\r\n (\"\"\"squad\"\"\", \"\"\"plain_text\"\"\", [\"\"\"train\"\"\", \"\"\"validation\"\"\"]),\r\n (\"\"\"dalle-mini/wit\"\"\", \"\"\"dalle-mini--wit\"\"\", [\"\"\"train\"\"\"]),\r\n (\"\"\"paws\"\"\", \"\"\"labeled_final\"\"\", [\"\"\"train\"\"\", \"\"\"test\"\"\", \"\"\"validation\"\"\"]),\r\n ] ,)\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tget_dataset_config_info(SCREAMING_SNAKE_CASE__ ,config_name=SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tassert info.config_name == config_name\r\n\t\t\t\t\t\tassert list(info.splits.keys() ) == expected_splits\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.parametrize(\r\n \"\"\"path, config_name, expected_exception\"\"\" ,[\r\n (\"\"\"paws\"\"\", None, ValueError),\r\n ] ,)\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ 
):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith pytest.raises(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tget_dataset_config_info(SCREAMING_SNAKE_CASE__ ,config_name=SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.parametrize(\r\n \"\"\"path, expected\"\"\" ,[\r\n (\"\"\"squad\"\"\", \"\"\"plain_text\"\"\"),\r\n (\"\"\"acronym_identification\"\"\", \"\"\"default\"\"\"),\r\n (\"\"\"lhoestq/squad\"\"\", \"\"\"plain_text\"\"\"),\r\n (\"\"\"lhoestq/test\"\"\", \"\"\"default\"\"\"),\r\n (\"\"\"lhoestq/demo1\"\"\", \"\"\"lhoestq--demo1\"\"\"),\r\n (\"\"\"dalle-mini/wit\"\"\", \"\"\"dalle-mini--wit\"\"\"),\r\n ] ,)\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tget_dataset_config_names(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tassert expected in config_names\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.parametrize(\r\n \"\"\"path, expected_configs, expected_splits_in_first_config\"\"\" ,[\r\n (\"\"\"squad\"\"\", [\"\"\"plain_text\"\"\"], [\"\"\"train\"\"\", \"\"\"validation\"\"\"]),\r\n (\"\"\"dalle-mini/wit\"\"\", [\"\"\"dalle-mini--wit\"\"\"], [\"\"\"train\"\"\"]),\r\n (\"\"\"paws\"\"\", [\"\"\"labeled_final\"\"\", \"\"\"labeled_swap\"\"\", \"\"\"unlabeled_final\"\"\"], [\"\"\"train\"\"\", \"\"\"test\"\"\", \"\"\"validation\"\"\"]),\r\n ] ,)\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tget_dataset_infos(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tassert list(infos.keys() ) == expected_configs\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\texpected_configs[0]\r\n\t\t\t\t\t\tassert expected_config in infos\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tinfos[expected_config]\r\n\t\t\t\t\t\tassert info.config_name == expected_config\r\n\t\t\t\t\t\tassert list(info.splits.keys() ) == expected_splits_in_first_config\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.parametrize(\r\n \"\"\"path, expected_config, expected_splits\"\"\" ,[\r\n (\"\"\"squad\"\"\", \"\"\"plain_text\"\"\", [\"\"\"train\"\"\", \"\"\"validation\"\"\"]),\r\n (\"\"\"dalle-mini/wit\"\"\", \"\"\"dalle-mini--wit\"\"\", [\"\"\"train\"\"\"]),\r\n (\"\"\"paws\"\"\", \"\"\"labeled_final\"\"\", [\"\"\"train\"\"\", \"\"\"test\"\"\", \"\"\"validation\"\"\"]),\r\n ] ,)\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tget_dataset_infos(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tassert expected_config in infos\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tinfos[expected_config]\r\n\t\t\t\t\t\tassert info.config_name == expected_config\r\n\t\t\t\t\t\tassert list(info.splits.keys() ) == expected_splits\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.parametrize(\r\n \"\"\"path, config_name, expected_exception\"\"\" ,[\r\n (\"\"\"paws\"\"\", None, ValueError),\r\n ] ,)\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith pytest.raises(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tget_dataset_split_names(SCREAMING_SNAKE_CASE__ ,config_name=SCREAMING_SNAKE_CASE__ )\r\n\r\n"},"code_codestyle":{"kind":"number","value":714,"string":"714"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport json\r\nimport pathlib\r\nimport unittest\r\n\r\nimport numpy as np\r\n\r\nfrom transformers.testing_utils import require_torch, require_vision, slow\r\nfrom transformers.utils import is_torch_available, 
is_vision_available\r\n\r\nfrom ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs\r\n\r\n\r\nif is_torch_available():\r\n\t\timport torch\r\n\r\nif is_vision_available():\r\n\t\tfrom PIL import Image\r\n\r\n\t\tfrom transformers import DetrImageProcessor\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tunittest.TestCase ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=7\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=30\t\t\t\t\t\t, lowercase_=400\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=1 / 255\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=[0.5, 0.5, 0.5]\t\t\t\t\t\t, lowercase_=[0.5, 0.5, 0.5]\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\t# by setting size[\"longest_edge\"] > max_resolution we're effectively not testing this :p\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tsize if size is not None else {\"\"\"shortest_edge\"\"\": 18, \"\"\"longest_edge\"\"\": 1333}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tparent\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tbatch_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnum_channels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tmin_resolution\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tmax_resolution\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdo_resize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tsize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tdo_rescale\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\trescale_factor\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdo_normalize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t 
=\t\t\t\timage_mean\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\timage_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdo_pad\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn {\r\n\t\t\t\t\t\t\t\t\t \"do_resize\": self.do_resize,\r\n\t\t\t\t\t\t\t\t\t \"size\": self.size,\r\n\t\t\t\t\t\t\t\t\t \"do_rescale\": self.do_rescale,\r\n\t\t\t\t\t\t\t\t\t \"rescale_factor\": self.rescale_factor,\r\n\t\t\t\t\t\t\t\t\t \"do_normalize\": self.do_normalize,\r\n\t\t\t\t\t\t\t\t\t \"image_mean\": self.image_mean,\r\n\t\t\t\t\t\t\t\t\t \"image_std\": self.image_std,\r\n\t\t\t\t\t\t\t\t\t \"do_pad\": self.do_pad,\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=False ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tif not batched:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\timage_inputs[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif isinstance(lowercase_\t\t\t\t\t\t, Image.Image ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Dict\t\t\t\t =\t\t\t\timage.size\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\timage.shape[1], image.shape[2]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif w < h:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tint(self.size[\"\"\"shortest_edge\"\"\"] * h / w )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.size[\"\"\"shortest_edge\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif w > h:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.size[\"\"\"shortest_edge\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tint(self.size[\"\"\"shortest_edge\"\"\"] * w / h 
)\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.size[\"\"\"shortest_edge\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.size[\"\"\"shortest_edge\"\"\"]\r\n\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : int\t\t\t\t =\t\t\t\tself.get_expected_values([image] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texpected_values.append((expected_height, expected_width) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tmax(lowercase_\t\t\t\t\t\t, key=lambda lowercase_ : item[0] )[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmax(lowercase_\t\t\t\t\t\t, key=lambda lowercase_ : item[1] )[1]\r\n\r\n\t\t\t\t\t\t\t\t\treturn expected_height, expected_width\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_torch\r\n@require_vision\r\nclass _a (\t\t\tsnake_case_ , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: Optional[Any] =\t\tDetrImageProcessor if is_vision_available() else None\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tDetrImageProcessingTester(self )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn self.image_processor_tester.prepare_image_processor_dict()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"image_mean\"\"\" ) 
)\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"image_std\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"do_normalize\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"do_rescale\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"rescale_factor\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"do_resize\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"size\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"do_pad\"\"\" ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.image_processing_class.from_dict(self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.size\t\t\t\t\t\t, {\"\"\"shortest_edge\"\"\": 18, \"\"\"longest_edge\"\"\": 1333} )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.do_pad\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.image_processing_class.from_dict(\r\n\t\t\t\t\t\t\t\t\t self.image_processor_dict\t\t\t\t\t\t, size=42\t\t\t\t\t\t, max_size=84\t\t\t\t\t\t, pad_and_return_pixel_mask=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.size\t\t\t\t\t\t, {\"\"\"shortest_edge\"\"\": 42, \"\"\"longest_edge\"\"\": 84} )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.do_pad\t\t\t\t\t\t, lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\t# Initialize image_processing\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict 
)\r\n\t\t\t\t\t\t\t\t\t# create random PIL images\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tprepare_image_inputs(self.image_processor_tester\t\t\t\t\t\t, equal_resolution=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(lowercase_\t\t\t\t\t\t, Image.Image )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test not batched input\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\timage_processing(image_inputs[0]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (1, self.image_processor_tester.num_channels, expected_height, expected_width)\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test batched\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : str\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_\t\t\t\t\t\t, batched=lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\timage_processing(lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.batch_size,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t expected_height,\r\n\t\t\t\t\t\t\t\t\t expected_width,\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\t# Initialize image_processing\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\t# create random numpy tensors\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t 
=\t\t\t\tprepare_image_inputs(self.image_processor_tester\t\t\t\t\t\t, equal_resolution=lowercase_\t\t\t\t\t\t, numpify=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(lowercase_\t\t\t\t\t\t, np.ndarray )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test not batched input\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\timage_processing(image_inputs[0]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (1, self.image_processor_tester.num_channels, expected_height, expected_width)\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test batched\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\timage_processing(lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : int\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_\t\t\t\t\t\t, batched=lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.batch_size,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t expected_height,\r\n\t\t\t\t\t\t\t\t\t expected_width,\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\t# Initialize image_processing\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\t# create random PyTorch tensors\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tprepare_image_inputs(self.image_processor_tester\t\t\t\t\t\t, equal_resolution=lowercase_\t\t\t\t\t\t, 
torchify=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(lowercase_\t\t\t\t\t\t, torch.Tensor )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test not batched input\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\timage_processing(image_inputs[0]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : str\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (1, self.image_processor_tester.num_channels, expected_height, expected_width)\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test batched\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\timage_processing(lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_\t\t\t\t\t\t, batched=lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.batch_size,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t expected_height,\r\n\t\t\t\t\t\t\t\t\t expected_width,\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@slow\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\t# prepare image and target\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tImage.open(\"\"\"./tests/fixtures/tests_samples/COCO/000000039769.png\"\"\" )\r\n\t\t\t\t\t\t\t\t\twith open(\"\"\"./tests/fixtures/tests_samples/COCO/coco_annotations.txt\"\"\"\t\t\t\t\t\t, \"\"\"r\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tjson.loads(f.read() )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t 
=\t\t\t\t{\"\"\"image_id\"\"\": 39769, \"\"\"annotations\"\"\": target}\r\n\r\n\t\t\t\t\t\t\t\t\t# encode them\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tDetrImageProcessor.from_pretrained(\"\"\"facebook/detr-resnet-50\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\timage_processing(images=lowercase_\t\t\t\t\t\t, annotations=lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t# verify pixel values\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.Size([1, 3, 800, 1066] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"pixel_values\"\"\"].shape\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.tensor([0.2_7_9_6, 0.3_1_3_8, 0.3_4_8_1] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"pixel_values\"\"\"][0, 0, 0, :3]\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, atol=1e-4 ) )\r\n\r\n\t\t\t\t\t\t\t\t\t# verify area\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttorch.tensor([5_8_8_7.9_6_0_0, 1_1_2_5_0.2_0_6_1, 4_8_9_3_5_3.8_4_3_8, 8_3_7_1_2_2.7_5_0_0, 1_4_7_9_6_7.5_1_5_6, 1_6_5_7_3_2.3_4_3_8] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"area\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify boxes\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttorch.Size([6, 4] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"labels\"\"\"][0][\"\"\"boxes\"\"\"].shape\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttorch.tensor([0.5_5_0_3, 0.2_7_6_5, 0.0_6_0_4, 0.2_2_1_5] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"boxes\"\"\"][0]\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, atol=1e-3 ) )\r\n\t\t\t\t\t\t\t\t\t# verify image_id\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttorch.tensor([39769] 
)\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"image_id\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify is_crowd\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.tensor([0, 0, 0, 0, 0, 0] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"iscrowd\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify class_labels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor([75, 75, 63, 65, 17, 17] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"class_labels\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify orig_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\ttorch.tensor([480, 640] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"orig_size\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor([800, 1066] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"size\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@slow\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\t# prepare image, target and masks_path\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tImage.open(\"\"\"./tests/fixtures/tests_samples/COCO/000000039769.png\"\"\" )\r\n\t\t\t\t\t\t\t\t\twith open(\"\"\"./tests/fixtures/tests_samples/COCO/coco_panoptic_annotations.txt\"\"\"\t\t\t\t\t\t, \"\"\"r\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tjson.loads(f.read() )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{\"\"\"file_name\"\"\": \"\"\"000000039769.png\"\"\", \"\"\"image_id\"\"\": 39769, \"\"\"segments_info\"\"\": target}\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tpathlib.Path(\"\"\"./tests/fixtures/tests_samples/COCO/coco_panoptic\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t# encode them\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tDetrImageProcessor.from_pretrained(\"\"\"facebook/detr-resnet-50-panoptic\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\timage_processing(images=lowercase_\t\t\t\t\t\t, annotations=lowercase_\t\t\t\t\t\t, masks_path=lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t# verify pixel values\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttorch.Size([1, 3, 800, 1066] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"pixel_values\"\"\"].shape\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor([0.2_7_9_6, 0.3_1_3_8, 0.3_4_8_1] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"pixel_values\"\"\"][0, 0, 0, :3]\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, atol=1e-4 ) )\r\n\r\n\t\t\t\t\t\t\t\t\t# verify area\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.tensor([1_4_7_9_7_9.6_8_7_5, 1_6_5_5_2_7.0_4_6_9, 4_8_4_6_3_8.5_9_3_8, 1_1_2_9_2.9_3_7_5, 5_8_7_9.6_5_6_2, 7_6_3_4.1_1_4_7] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"area\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify boxes\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttorch.Size([6, 4] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"labels\"\"\"][0][\"\"\"boxes\"\"\"].shape\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.tensor([0.2_6_2_5, 0.5_4_3_7, 0.4_6_8_8, 0.8_6_2_5] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"boxes\"\"\"][0]\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, atol=1e-3 ) )\r\n\t\t\t\t\t\t\t\t\t# verify image_id\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t 
=\t\t\t\ttorch.tensor([39769] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"image_id\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify is_crowd\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttorch.tensor([0, 0, 0, 0, 0, 0] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"iscrowd\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify class_labels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttorch.tensor([17, 17, 63, 75, 75, 93] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"class_labels\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify masks\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t822873\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"labels\"\"\"][0][\"\"\"masks\"\"\"].sum().item()\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\t# verify orig_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor([480, 640] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"orig_size\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttorch.tensor([800, 1066] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"size\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305164,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport json\r\nimport os\r\nimport sys\r\nimport tempfile\r\nimport unittest\r\nfrom pathlib import Path\r\nfrom shutil import copyfile\r\n\r\nfrom huggingface_hub import HfFolder, Repository, create_repo, delete_repo\r\nfrom requests.exceptions import HTTPError\r\n\r\nimport transformers\r\nfrom transformers import 
(\r\n CONFIG_MAPPING,\r\n FEATURE_EXTRACTOR_MAPPING,\r\n PROCESSOR_MAPPING,\r\n TOKENIZER_MAPPING,\r\n AutoConfig,\r\n AutoFeatureExtractor,\r\n AutoProcessor,\r\n AutoTokenizer,\r\n BertTokenizer,\r\n ProcessorMixin,\r\n WavaVecaConfig,\r\n WavaVecaFeatureExtractor,\r\n WavaVecaProcessor,\r\n)\r\nfrom transformers.testing_utils import TOKEN, USER, get_tests_dir, is_staging_test\r\nfrom transformers.tokenization_utils import TOKENIZER_CONFIG_FILE\r\nfrom transformers.utils import FEATURE_EXTRACTOR_NAME, is_tokenizers_available\r\n\r\n\r\nsys.path.append(str(Path(__file__).parent.parent.parent.parent / 'utils'))\r\n\r\nfrom test_module.custom_configuration import CustomConfig # noqa E402\r\nfrom test_module.custom_feature_extraction import CustomFeatureExtractor # noqa E402\r\nfrom test_module.custom_processing import CustomProcessor # noqa E402\r\nfrom test_module.custom_tokenization import CustomTokenizer # noqa E402\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =get_tests_dir('fixtures/dummy_feature_extractor_config.json')\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =get_tests_dir('fixtures/vocab.json')\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =get_tests_dir('fixtures')\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tunittest.TestCase ):\r\n\t\t\t_UpperCamelCase: Tuple =\t\t[\"\"\"[UNK]\"\"\", \"\"\"[CLS]\"\"\", \"\"\"[SEP]\"\"\", \"\"\"[PAD]\"\"\", \"\"\"[MASK]\"\"\", \"\"\"bla\"\"\", \"\"\"blou\"\"\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t0\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\"\"\"facebook/wav2vec2-base-960h\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertIsInstance(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmpdirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tWavaVecaConfig()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\"\"\"facebook/wav2vec2-base-960h\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# save in new folder\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmodel_config.save_pretrained(__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprocessor.save_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertIsInstance(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmpdirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# copy relevant files\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcopyfile(__lowercase\t\t\t\t\t\t, os.path.join(__lowercase\t\t\t\t\t\t, __lowercase ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcopyfile(__lowercase\t\t\t\t\t\t, os.path.join(__lowercase\t\t\t\t\t\t, \"\"\"vocab.json\"\"\" ) )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertIsInstance(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmpdirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tWavaVecaFeatureExtractor()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tAutoTokenizer.from_pretrained(\"\"\"facebook/wav2vec2-base-960h\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tWavaVecaProcessor(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# save 
in new folder\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprocessor.save_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# drop `processor_class` in tokenizer\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(__lowercase\t\t\t\t\t\t, __lowercase )\t\t\t\t\t\t, \"\"\"r\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tjson.load(__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tconfig_dict.pop(\"\"\"processor_class\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(__lowercase\t\t\t\t\t\t, __lowercase )\t\t\t\t\t\t, \"\"\"w\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tf.write(json.dumps(__lowercase ) )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertIsInstance(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmpdirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tWavaVecaFeatureExtractor()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tAutoTokenizer.from_pretrained(\"\"\"facebook/wav2vec2-base-960h\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tWavaVecaProcessor(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# save in new folder\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprocessor.save_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# drop `processor_class` in feature extractor\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(__lowercase\t\t\t\t\t\t, __lowercase )\t\t\t\t\t\t, \"\"\"r\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tjson.load(__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tconfig_dict.pop(\"\"\"processor_class\"\"\" 
)\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(__lowercase\t\t\t\t\t\t, __lowercase )\t\t\t\t\t\t, \"\"\"w\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tf.write(json.dumps(__lowercase ) )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertIsInstance(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmpdirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tWavaVecaConfig(processor_class=\"\"\"Wav2Vec2Processor\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmodel_config.save_pretrained(__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# copy relevant files\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcopyfile(__lowercase\t\t\t\t\t\t, os.path.join(__lowercase\t\t\t\t\t\t, \"\"\"vocab.json\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# create emtpy sample processor\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(__lowercase\t\t\t\t\t\t, __lowercase )\t\t\t\t\t\t, \"\"\"w\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tf.write(\"\"\"{}\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertIsInstance(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\t# If remote code is not set, we will time out when asking whether to load the model.\r\n\t\t\t\t\t\t\t\t\twith self.assertRaises(__lowercase ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\"\"\"hf-internal-testing/test_dynamic_processor\"\"\" )\r\n\t\t\t\t\t\t\t\t\t# If remote code is disabled, we can't load this config.\r\n\t\t\t\t\t\t\t\t\twith 
self.assertRaises(__lowercase ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"hf-internal-testing/test_dynamic_processor\"\"\"\t\t\t\t\t\t, trust_remote_code=__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\"\"\"hf-internal-testing/test_dynamic_processor\"\"\"\t\t\t\t\t\t, trust_remote_code=__lowercase )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(processor.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(processor.__class__.__name__\t\t\t\t\t\t, \"\"\"NewProcessor\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tprocessor.feature_extractor\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(feature_extractor.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(feature_extractor.__class__.__name__\t\t\t\t\t\t, \"\"\"NewFeatureExtractor\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tprocessor.tokenizer\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(tokenizer.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\tif is_tokenizers_available():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(tokenizer.__class__.__name__\t\t\t\t\t\t, \"\"\"NewTokenizerFast\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Test we can also load the slow version\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"hf-internal-testing/test_dynamic_processor\"\"\"\t\t\t\t\t\t, trust_remote_code=__lowercase\t\t\t\t\t\t, use_fast=__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnew_processor.tokenizer\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue(new_tokenizer.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(new_tokenizer.__class__.__name__\t\t\t\t\t\t, \"\"\"NewTokenizer\"\"\" 
)\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(tokenizer.__class__.__name__\t\t\t\t\t\t, \"\"\"NewTokenizer\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tAutoConfig.register(\"\"\"custom\"\"\"\t\t\t\t\t\t, __lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tAutoFeatureExtractor.register(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tAutoTokenizer.register(__lowercase\t\t\t\t\t\t, slow_tokenizer_class=__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tAutoProcessor.register(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Trying to register something existing in the Transformers library will raise an error\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith self.assertRaises(__lowercase ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tAutoProcessor.register(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Now that the config is registered, it can be used as any other config with the auto-API\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tCustomFeatureExtractor.from_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmp_dir:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tos.path.join(__lowercase\t\t\t\t\t\t, \"\"\"vocab.txt\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(__lowercase\t\t\t\t\t\t, \"\"\"w\"\"\"\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as vocab_writer:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tvocab_writer.write(\"\"\"\"\"\".join([x + \"\"\"\\n\"\"\" for x in self.vocab_tokens] ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tCustomTokenizer(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tCustomProcessor(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmp_dir:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprocessor.save_pretrained(__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\tfinally:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif \"custom\" in CONFIG_MAPPING._extra_content:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdel CONFIG_MAPPING._extra_content[\"custom\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif CustomConfig in FEATURE_EXTRACTOR_MAPPING._extra_content:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdel FEATURE_EXTRACTOR_MAPPING._extra_content[CustomConfig]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif CustomConfig in TOKENIZER_MAPPING._extra_content:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdel TOKENIZER_MAPPING._extra_content[CustomConfig]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif CustomConfig in PROCESSOR_MAPPING._extra_content:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdel PROCESSOR_MAPPING._extra_content[CustomConfig]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\t\tclass _a (\t\t\tlowercase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t_UpperCamelCase: Union[str, Any] =\t\tFalse\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\t\tclass _a (\t\t\tlowercase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t_UpperCamelCase: Dict =\t\tFalse\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\t\tclass _a (\t\t\tlowercase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t_UpperCamelCase: Union[str, Any] =\t\t\"\"\"AutoFeatureExtractor\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t_UpperCamelCase: List[str] =\t\t\"\"\"AutoTokenizer\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t_UpperCamelCase: Union[str, Any] 
=\t\tFalse\r\n\r\n\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tAutoConfig.register(\"\"\"custom\"\"\"\t\t\t\t\t\t, __lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tAutoFeatureExtractor.register(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tAutoTokenizer.register(__lowercase\t\t\t\t\t\t, slow_tokenizer_class=__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tAutoProcessor.register(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# If remote code is not set, the default is to use local classes.\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\"\"\"hf-internal-testing/test_dynamic_processor\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(processor.__class__.__name__\t\t\t\t\t\t, \"\"\"NewProcessor\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertFalse(processor.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertFalse(processor.feature_extractor.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertFalse(processor.tokenizer.special_attribute_present )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# If remote code is disabled, we load the local ones.\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"hf-internal-testing/test_dynamic_processor\"\"\"\t\t\t\t\t\t, trust_remote_code=__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(processor.__class__.__name__\t\t\t\t\t\t, \"\"\"NewProcessor\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertFalse(processor.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertFalse(processor.feature_extractor.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertFalse(processor.tokenizer.special_attribute_present )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# If remote is enabled, we load from the Hub.\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : 
Dict\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"hf-internal-testing/test_dynamic_processor\"\"\"\t\t\t\t\t\t, trust_remote_code=__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(processor.__class__.__name__\t\t\t\t\t\t, \"\"\"NewProcessor\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue(processor.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue(processor.feature_extractor.special_attribute_present )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue(processor.tokenizer.special_attribute_present )\r\n\r\n\t\t\t\t\t\t\t\t\tfinally:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif \"custom\" in CONFIG_MAPPING._extra_content:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdel CONFIG_MAPPING._extra_content[\"custom\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif CustomConfig in FEATURE_EXTRACTOR_MAPPING._extra_content:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdel FEATURE_EXTRACTOR_MAPPING._extra_content[CustomConfig]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif CustomConfig in TOKENIZER_MAPPING._extra_content:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdel TOKENIZER_MAPPING._extra_content[CustomConfig]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif CustomConfig in PROCESSOR_MAPPING._extra_content:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdel PROCESSOR_MAPPING._extra_content[CustomConfig]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\"\"\"hf-internal-testing/tiny-random-bert\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(processor.__class__.__name__\t\t\t\t\t\t, \"\"\"BertTokenizerFast\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(\"\"\"hf-internal-testing/tiny-random-convnext\"\"\" 
)\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(processor.__class__.__name__\t\t\t\t\t\t, \"\"\"ConvNextImageProcessor\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n@is_staging_test\r\nclass _a (\t\t\tunittest.TestCase ):\r\n\t\t\t_UpperCamelCase: List[Any] =\t\t[\"\"\"[UNK]\"\"\", \"\"\"[CLS]\"\"\", \"\"\"[SEP]\"\"\", \"\"\"[PAD]\"\"\", \"\"\"[MASK]\"\"\", \"\"\"bla\"\"\", \"\"\"blou\"\"\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@classmethod\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tcls ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tTOKEN\r\n\t\t\t\t\t\t\t\t\tHfFolder.save_token(__lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@classmethod\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tcls ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdelete_repo(token=cls._token\t\t\t\t\t\t, repo_id=\"\"\"test-processor\"\"\" )\r\n\t\t\t\t\t\t\t\t\texcept HTTPError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpass\r\n\r\n\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdelete_repo(token=cls._token\t\t\t\t\t\t, repo_id=\"\"\"valid_org/test-processor-org\"\"\" )\r\n\t\t\t\t\t\t\t\t\texcept HTTPError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpass\r\n\r\n\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdelete_repo(token=cls._token\t\t\t\t\t\t, repo_id=\"\"\"test-dynamic-processor\"\"\" )\r\n\t\t\t\t\t\t\t\t\texcept HTTPError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tWavaVecaProcessor.from_pretrained(__lowercase )\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmp_dir:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprocessor.save_pretrained(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t os.path.join(__lowercase\t\t\t\t\t\t, \"\"\"test-processor\"\"\" )\t\t\t\t\t\t, push_to_hub=__lowercase\t\t\t\t\t\t, use_auth_token=self._token )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t 
=\t\t\t\tWavaVecaProcessor.from_pretrained(f\"\"\"{USER}/test-processor\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor k, v in processor.feature_extractor.__dict__.items():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(__lowercase\t\t\t\t\t\t, getattr(new_processor.feature_extractor\t\t\t\t\t\t, __lowercase ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertDictEqual(new_processor.tokenizer.get_vocab()\t\t\t\t\t\t, processor.tokenizer.get_vocab() )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tWavaVecaProcessor.from_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmp_dir:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprocessor.save_pretrained(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t os.path.join(__lowercase\t\t\t\t\t\t, \"\"\"test-processor-org\"\"\" )\t\t\t\t\t\t, push_to_hub=__lowercase\t\t\t\t\t\t, use_auth_token=self._token\t\t\t\t\t\t, organization=\"\"\"valid_org\"\"\"\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tWavaVecaProcessor.from_pretrained(\"\"\"valid_org/test-processor-org\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor k, v in processor.feature_extractor.__dict__.items():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(__lowercase\t\t\t\t\t\t, getattr(new_processor.feature_extractor\t\t\t\t\t\t, __lowercase ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertDictEqual(new_processor.tokenizer.get_vocab()\t\t\t\t\t\t, processor.tokenizer.get_vocab() )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tCustomFeatureExtractor.register_for_auto_class()\r\n\t\t\t\t\t\t\t\t\tCustomTokenizer.register_for_auto_class()\r\n\t\t\t\t\t\t\t\t\tCustomProcessor.register_for_auto_class()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tCustomFeatureExtractor.from_pretrained(__lowercase 
)\r\n\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmp_dir:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tos.path.join(__lowercase\t\t\t\t\t\t, \"\"\"vocab.txt\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(__lowercase\t\t\t\t\t\t, \"\"\"w\"\"\"\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as vocab_writer:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tvocab_writer.write(\"\"\"\"\"\".join([x + \"\"\"\\n\"\"\" for x in self.vocab_tokens] ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tCustomTokenizer(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tCustomProcessor(__lowercase\t\t\t\t\t\t, __lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmp_dir:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcreate_repo(f\"\"\"{USER}/test-dynamic-processor\"\"\"\t\t\t\t\t\t, token=self._token )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tRepository(__lowercase\t\t\t\t\t\t, clone_from=f\"\"\"{USER}/test-dynamic-processor\"\"\"\t\t\t\t\t\t, token=self._token )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprocessor.save_pretrained(__lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# This has added the proper auto_map field to the feature extractor config\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertDictEqual(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t processor.feature_extractor.auto_map\t\t\t\t\t\t, {\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"AutoFeatureExtractor\"\"\": \"\"\"custom_feature_extraction.CustomFeatureExtractor\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"AutoProcessor\"\"\": \"\"\"custom_processing.CustomProcessor\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t }\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# This has added the proper auto_map field to the tokenizer config\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(__lowercase\t\t\t\t\t\t, \"\"\"tokenizer_config.json\"\"\" ) ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : 
int\t\t\t\t =\t\t\t\tjson.load(__lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertDictEqual(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t tokenizer_config[\"\"\"auto_map\"\"\"]\t\t\t\t\t\t, {\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"AutoTokenizer\"\"\": [\"\"\"custom_tokenization.CustomTokenizer\"\"\", None],\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"AutoProcessor\"\"\": \"\"\"custom_processing.CustomProcessor\"\"\",\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t }\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# The code has been copied from fixtures\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue(os.path.isfile(os.path.join(__lowercase\t\t\t\t\t\t, \"\"\"custom_feature_extraction.py\"\"\" ) ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue(os.path.isfile(os.path.join(__lowercase\t\t\t\t\t\t, \"\"\"custom_tokenization.py\"\"\" ) ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue(os.path.isfile(os.path.join(__lowercase\t\t\t\t\t\t, \"\"\"custom_processing.py\"\"\" ) ) )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\trepo.push_to_hub()\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tAutoProcessor.from_pretrained(f\"\"\"{USER}/test-dynamic-processor\"\"\"\t\t\t\t\t\t, trust_remote_code=__lowercase )\r\n\t\t\t\t\t\t\t\t\t# Can't make an isinstance check because the new_processor is from the CustomProcessor class of a dynamic module\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(new_processor.__class__.__name__\t\t\t\t\t\t, \"\"\"CustomProcessor\"\"\" )\r\n\r\n"},"code_codestyle":{"kind":"number","value":715,"string":"715"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\twhile b > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif b & 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tres += a\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\ta += a\r\n\t\t\t\t\t\t\t\t\t\t\t\tb >>= 
1\r\n\r\n\t\t\t\t\t\treturn res\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\twhile b > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif b & 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t((res % c) + (a % c)) % c\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\ta += a\r\n\t\t\t\t\t\t\t\t\t\t\t\tb >>= 1\r\n\r\n\t\t\t\t\t\treturn res\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305165,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...utils import logging\r\nfrom ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny ={\r\n \"\"\"facebook/convnextv2-tiny-1k-224\"\"\": \"\"\"https://huggingface.co/facebook/convnextv2-tiny-1k-224/resolve/main/config.json\"\"\",\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\t__snake_case , __snake_case ):\r\n _UpperCamelCase: int =\t\t'convnextv2'\r\n\r\n\r\n\r\n\r\n\r\n\r\n def __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=\"gelu\"\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-12\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=224\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tint:\r\n super().__init__(**A_ )\r\n\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tnum_channels\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tpatch_size\r\n lowerCAmelCase : 
Union[str, Any]\t\t\t\t =\t\t\t\tnum_stages\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[96, 192, 384, 768] if hidden_sizes is None else hidden_sizes\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[3, 3, 9, 3] if depths is None else depths\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\thidden_act\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tinitializer_range\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tlayer_norm_eps\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tdrop_path_rate\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\timage_size\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t[\"stem\"] + [f\"\"\"stage{idx}\"\"\" for idx in range(1\t\t\t\t\t\t, len(self.depths ) + 1 )]\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tget_aligned_output_features_output_indices(\r\n out_features=A_\t\t\t\t\t\t, out_indices=A_\t\t\t\t\t\t, stage_names=self.stage_names )\r\n\r\n"},"code_codestyle":{"kind":"number","value":716,"string":"716"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom math import factorial\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\treal\r\n\t\t\t\t\t\t\t\t\tif isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t[1] * rank\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\trank\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __repr__(\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (\r\n\t\t\t\t\t\t\t\t\t f\"\"\"{self.real}+\"\"\"\r\n\t\t\t\t\t\t\t\t\t f\"\"\"{'+'.join(str(lowercase_ )+'E'+str(n+1 )for n,dual in enumerate(self.duals ) )}\"\"\"\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : 
List[Any]\t\t\t\t =\t\t\t\tself.duals.copy()\r\n\t\t\t\t\t\t\t\t\twhile cur[-1] == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcur.pop(-1 )\r\n\t\t\t\t\t\t\t\t\treturn Dual(self.real\t\t\t\t\t\t, lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __add__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tif not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn Dual(self.real + other\t\t\t\t\t\t, self.duals )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.duals.copy()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tother.duals.copy()\r\n\t\t\t\t\t\t\t\t\tif len(lowercase_ ) > len(lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\to_dual.extend([1] * (len(lowercase_ ) - len(lowercase_ )) )\r\n\t\t\t\t\t\t\t\t\telif len(lowercase_ ) < len(lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ts_dual.extend([1] * (len(lowercase_ ) - len(lowercase_ )) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\tfor i in range(len(lowercase_ ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals.append(s_dual[i] + o_dual[i] )\r\n\t\t\t\t\t\t\t\t\treturn Dual(self.real + other.real\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t_UpperCamelCase: List[Any] =\t\t__add__\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __sub__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\treturn self + other * -1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __mul__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tif not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor i in self.duals:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals.append(i * other )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn Dual(self.real * other\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t 
=\t\t\t\t[0] * (len(self.duals ) + len(other.duals ) + 1)\r\n\t\t\t\t\t\t\t\t\tfor i, item in enumerate(self.duals ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor j, jtem in enumerate(other.duals ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals[i + j + 1] += item * jtem\r\n\t\t\t\t\t\t\t\t\tfor k in range(len(self.duals ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals[k] += self.duals[k] * other.real\r\n\t\t\t\t\t\t\t\t\tfor index in range(len(other.duals ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals[index] += other.duals[index] * self.real\r\n\t\t\t\t\t\t\t\t\treturn Dual(self.real * other.real\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t_UpperCamelCase: str =\t\t__mul__\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __truediv__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tif not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor i in self.duals:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals.append(i / other )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn Dual(self.real / other\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\traise ValueError\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __floordiv__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tif not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor i in self.duals:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals.append(i // other )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn Dual(self.real // other\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\traise ValueError\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __pow__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tif n < 0 or isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"power must be a positive 
integer\"\"\" )\r\n\t\t\t\t\t\t\t\t\tif n == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn 1\r\n\t\t\t\t\t\t\t\t\tif n == 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn self\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself\r\n\t\t\t\t\t\t\t\t\tfor _ in range(n - 1 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tx *= self\r\n\t\t\t\t\t\t\t\t\treturn x\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif not callable(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"differentiate() requires a function as input for func\"\"\" )\r\n\t\t\t\t\t\tif not isinstance(SCREAMING_SNAKE_CASE__ ,(float, int) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"differentiate() requires a float as input for position\"\"\" )\r\n\t\t\t\t\t\tif not isinstance(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"differentiate() requires an int as input for order\"\"\" )\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tDual(SCREAMING_SNAKE_CASE__ ,1 )\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tfunc(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tif order == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn result.real\r\n\t\t\t\t\t\treturn result.duals[order - 1] * factorial(SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\tdef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\treturn y**2 * y**4\r\n\r\n\t\tprint(differentiate(f, 9, 2))\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305166,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport 
hashlib\r\nimport unittest\r\n\r\nfrom transformers import MODEL_FOR_DEPTH_ESTIMATION_MAPPING, is_torch_available, is_vision_available\r\nfrom transformers.pipelines import DepthEstimationPipeline, pipeline\r\nfrom transformers.testing_utils import (\r\n is_pipeline_test,\r\n nested_simplify,\r\n require_tf,\r\n require_timm,\r\n require_torch,\r\n require_vision,\r\n slow,\r\n)\r\n\r\nfrom .test_pipelines_common import ANY\r\n\r\n\r\nif is_torch_available():\r\n\t\timport torch\r\n\r\nif is_vision_available():\r\n\t\tfrom PIL import Image\r\nelse:\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\tclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t@staticmethod\r\n\t\t\t\t\tdef _snake_case (\t\t\t\t\t\t*lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\thashlib.mda(image.tobytes() )\r\n\t\t\t\t\t\treturn m.hexdigest()\r\n\r\n\r\n\r\n\r\n\r\n\r\n@is_pipeline_test\r\n@require_vision\r\n@require_timm\r\n@require_torch\r\nclass _a (\t\t\tunittest.TestCase ):\r\n\t\t\t_UpperCamelCase =\t\tMODEL_FOR_DEPTH_ESTIMATION_MAPPING\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tDepthEstimationPipeline(model=lowercase_\t\t\t\t\t\t, image_processor=lowercase_ )\r\n\t\t\t\t\t\t\t\t\treturn depth_estimator, [\r\n\t\t\t\t\t\t\t\t\t \"./tests/fixtures/tests_samples/COCO/000000039769.png\",\r\n\t\t\t\t\t\t\t\t\t \"./tests/fixtures/tests_samples/COCO/000000039769.png\",\r\n\t\t\t\t\t\t\t\t\t]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t 
=\t\t\t\tdepth_estimator(\"\"\"./tests/fixtures/tests_samples/COCO/000000039769.png\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual({\"\"\"predicted_depth\"\"\": ANY(torch.Tensor ), \"\"\"depth\"\"\": ANY(Image.Image )}\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\timport datasets\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tdatasets.load_dataset(\"\"\"hf-internal-testing/fixtures_image_utils\"\"\"\t\t\t\t\t\t, \"\"\"image\"\"\"\t\t\t\t\t\t, split=\"\"\"test\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tdepth_estimator(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t Image.open(\"\"\"./tests/fixtures/tests_samples/COCO/000000039769.png\"\"\" ),\r\n\t\t\t\t\t\t\t\t\t \"\"\"http://images.cocodataset.org/val2017/000000039769.jpg\"\"\",\r\n\t\t\t\t\t\t\t\t\t # RGBA\r\n\t\t\t\t\t\t\t\t\t dataset[0][\"\"\"file\"\"\"],\r\n\t\t\t\t\t\t\t\t\t # LA\r\n\t\t\t\t\t\t\t\t\t dataset[1][\"\"\"file\"\"\"],\r\n\t\t\t\t\t\t\t\t\t # L\r\n\t\t\t\t\t\t\t\t\t dataset[2][\"\"\"file\"\"\"],\r\n\t\t\t\t\t\t\t\t\t ] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t {\"\"\"predicted_depth\"\"\": ANY(torch.Tensor ), \"\"\"depth\"\"\": ANY(Image.Image )},\r\n\t\t\t\t\t\t\t\t\t {\"\"\"predicted_depth\"\"\": ANY(torch.Tensor ), \"\"\"depth\"\"\": ANY(Image.Image )},\r\n\t\t\t\t\t\t\t\t\t {\"\"\"predicted_depth\"\"\": ANY(torch.Tensor ), \"\"\"depth\"\"\": ANY(Image.Image )},\r\n\t\t\t\t\t\t\t\t\t {\"\"\"predicted_depth\"\"\": ANY(torch.Tensor ), \"\"\"depth\"\"\": ANY(Image.Image )},\r\n\t\t\t\t\t\t\t\t\t {\"\"\"predicted_depth\"\"\": ANY(torch.Tensor ), \"\"\"depth\"\"\": ANY(Image.Image )},\r\n\t\t\t\t\t\t\t\t\t ]\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_tf\r\n\t\t\t@unittest.skip(\"\"\"Depth estimation is not implemented in TF\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@slow\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t\"\"\"Intel/dpt-large\"\"\"\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tpipeline(\"\"\"depth-estimation\"\"\"\t\t\t\t\t\t, model=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tdepth_estimator(\"\"\"http://images.cocodataset.org/val2017/000000039769.jpg\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\thashimage(outputs[\"\"\"depth\"\"\"] )\r\n\r\n\t\t\t\t\t\t\t\t\t# This seems flaky.\r\n\t\t\t\t\t\t\t\t\t# self.assertEqual(outputs[\"depth\"], \"1a39394e282e9f3b0741a90b9f108977\")\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(nested_simplify(outputs[\"\"\"predicted_depth\"\"\"].max().item() )\t\t\t\t\t\t, 2_9.3_0_4 )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(nested_simplify(outputs[\"\"\"predicted_depth\"\"\"].min().item() )\t\t\t\t\t\t, 2.6_6_2 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\t# This is highly irregular to have no small tests.\r\n\t\t\t\t\t\t\t\t\tself.skipTest(\"\"\"There is not hf-internal-testing tiny model for either GLPN nor DPT\"\"\" )\r\n\r\n"},"code_codestyle":{"kind":"number","value":717,"string":"717"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ..utils import DummyObject, requires_backends\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tmetaclass=snake_case_ ):\r\n\t\t\t_UpperCamelCase: List[Any] =\t\t[\"keras_nlp\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, *lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\trequires_backends(self\t\t\t\t\t\t, [\"\"\"keras_nlp\"\"\"] 
)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305167,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom scipy.stats import pearsonr, spearmanr\r\nfrom sklearn.metrics import fa_score, matthews_corrcoef\r\n\r\nimport datasets\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr ='\\\\n@inproceedings{wang2019glue,\\n title={{GLUE}: A Multi-Task Benchmark and Analysis Platform for Natural Language Understanding},\\n author={Wang, Alex and Singh, Amanpreet and Michael, Julian and Hill, Felix and Levy, Omer and Bowman, Samuel R.},\\n note={In the Proceedings of ICLR.},\\n year={2019}\\n}\\n'\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] ='\\\\nGLUE, the General Language Understanding Evaluation benchmark\\n(https://gluebenchmark.com/) is a collection of resources for training,\\nevaluating, and analyzing natural language understanding systems.\\n'\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint ='\\nCompute GLUE evaluation metric associated to each GLUE dataset.\\nArgs:\\n predictions: list of predictions to score.\\n Each translation should be tokenized into a list of tokens.\\n references: list of lists of references for each translation.\\n Each reference should be tokenized into a list of tokens.\\nReturns: depending on the GLUE subset, one or several of:\\n \"accuracy\": Accuracy\\n \"f1\": F1 score\\n \"pearson\": Pearson Correlation\\n \"spearmanr\": Spearman Correlation\\n \"matthews_correlation\": Matthew Correlation\\nExamples:\\n\\n >>> glue_metric = datasets.load_metric(\\'glue\\', \\'sst2\\') # \\'sst2\\' or any of [\"mnli\", \"mnli_mismatched\", \"mnli_matched\", \"qnli\", \"rte\", \"wnli\", \"hans\"]\\n >>> references = [0, 1]\\n >>> predictions = [0, 1]\\n >>> results = glue_metric.compute(predictions=predictions, references=references)\\n >>> print(results)\\n {\\'accuracy\\': 1.0}\\n\\n >>> glue_metric = 
datasets.load_metric(\\'glue\\', \\'mrpc\\') # \\'mrpc\\' or \\'qqp\\'\\n >>> references = [0, 1]\\n >>> predictions = [0, 1]\\n >>> results = glue_metric.compute(predictions=predictions, references=references)\\n >>> print(results)\\n {\\'accuracy\\': 1.0, \\'f1\\': 1.0}\\n\\n >>> glue_metric = datasets.load_metric(\\'glue\\', \\'stsb\\')\\n >>> references = [0., 1., 2., 3., 4., 5.]\\n >>> predictions = [0., 1., 2., 3., 4., 5.]\\n >>> results = glue_metric.compute(predictions=predictions, references=references)\\n >>> print({\"pearson\": round(results[\"pearson\"], 2), \"spearmanr\": round(results[\"spearmanr\"], 2)})\\n {\\'pearson\\': 1.0, \\'spearmanr\\': 1.0}\\n\\n >>> glue_metric = datasets.load_metric(\\'glue\\', \\'cola\\')\\n >>> references = [0, 1]\\n >>> predictions = [0, 1]\\n >>> results = glue_metric.compute(predictions=predictions, references=references)\\n >>> print(results)\\n {\\'matthews_correlation\\': 1.0}\\n'\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\treturn float((preds == labels).mean() )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tsimple_accuracy(__lowerCAmelCase ,__lowerCAmelCase )\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tfloat(fa_score(y_true=__lowerCAmelCase ,y_pred=__lowerCAmelCase ) )\r\n\t\t\t\t\t\treturn {\r\n\t\t\t\t\t\t \"accuracy\": acc,\r\n\t\t\t\t\t\t \"f1\": fa,\r\n\t\t\t\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tfloat(pearsonr(__lowerCAmelCase ,__lowerCAmelCase )[0] 
)\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tfloat(spearmanr(__lowerCAmelCase ,__lowerCAmelCase )[0] )\r\n\t\t\t\t\t\treturn {\r\n\t\t\t\t\t\t \"pearson\": pearson_corr,\r\n\t\t\t\t\t\t \"spearmanr\": spearman_corr,\r\n\t\t\t\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n@datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION , _KWARGS_DESCRIPTION )\r\nclass _a (\t\t\tdatasets.Metric ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\t\tif self.config_name not in [\r\n\t\t\t\t\t\t\t\t\t \"sst2\",\r\n\t\t\t\t\t\t\t\t\t \"mnli\",\r\n\t\t\t\t\t\t\t\t\t \"mnli_mismatched\",\r\n\t\t\t\t\t\t\t\t\t \"mnli_matched\",\r\n\t\t\t\t\t\t\t\t\t \"cola\",\r\n\t\t\t\t\t\t\t\t\t \"stsb\",\r\n\t\t\t\t\t\t\t\t\t \"mrpc\",\r\n\t\t\t\t\t\t\t\t\t \"qqp\",\r\n\t\t\t\t\t\t\t\t\t \"qnli\",\r\n\t\t\t\t\t\t\t\t\t \"rte\",\r\n\t\t\t\t\t\t\t\t\t \"wnli\",\r\n\t\t\t\t\t\t\t\t\t \"hans\",\r\n\t\t\t\t\t\t\t\t\t]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise KeyError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"You should supply a configuration name selected in \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"[\\\"sst2\\\", \\\"mnli\\\", \\\"mnli_mismatched\\\", \\\"mnli_matched\\\", \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"\\\"cola\\\", \\\"stsb\\\", \\\"mrpc\\\", \\\"qqp\\\", \\\"qnli\\\", \\\"rte\\\", \\\"wnli\\\", \\\"hans\\\"]\"\"\" )\r\n\t\t\t\t\t\t\t\t\treturn datasets.MetricInfo(\r\n\t\t\t\t\t\t\t\t\t description=_DESCRIPTION\t\t\t\t\t\t, citation=_CITATION\t\t\t\t\t\t, inputs_description=_KWARGS_DESCRIPTION\t\t\t\t\t\t, features=datasets.Features(\r\n\t\t\t\t\t\t\t\t\t {\r\n\t\t\t\t\t\t\t\t\t \"\"\"predictions\"\"\": datasets.Value(\"\"\"int64\"\"\" if self.config_name != \"\"\"stsb\"\"\" else \"\"\"float32\"\"\" ),\r\n\t\t\t\t\t\t\t\t\t \"\"\"references\"\"\": datasets.Value(\"\"\"int64\"\"\" if self.config_name != \"\"\"stsb\"\"\" else \"\"\"float32\"\"\" ),\r\n\t\t\t\t\t\t\t\t\t } 
)\t\t\t\t\t\t, codebase_urls=[]\t\t\t\t\t\t, reference_urls=[]\t\t\t\t\t\t, format=\"\"\"numpy\"\"\"\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tUnion[str, Any]:\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\t\tif self.config_name == \"cola\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn {\"matthews_correlation\": matthews_corrcoef(lowerCamelCase__\t\t\t\t\t\t, lowerCamelCase__ )}\r\n\t\t\t\t\t\t\t\t\telif self.config_name == \"stsb\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn pearson_and_spearman(lowerCamelCase__\t\t\t\t\t\t, lowerCamelCase__ )\r\n\t\t\t\t\t\t\t\t\telif self.config_name in [\"mrpc\", \"qqp\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn acc_and_fa(lowerCamelCase__\t\t\t\t\t\t, lowerCamelCase__ )\r\n\t\t\t\t\t\t\t\t\telif self.config_name in [\"sst2\", \"mnli\", \"mnli_mismatched\", \"mnli_matched\", \"qnli\", \"rte\", \"wnli\", \"hans\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn {\"accuracy\": simple_accuracy(lowerCamelCase__\t\t\t\t\t\t, lowerCamelCase__ )}\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise KeyError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"You should supply a configuration name selected in \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"[\\\"sst2\\\", \\\"mnli\\\", \\\"mnli_mismatched\\\", \\\"mnli_matched\\\", \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"\\\"cola\\\", \\\"stsb\\\", \\\"mrpc\\\", \\\"qqp\\\", \\\"qnli\\\", \\\"rte\\\", \\\"wnli\\\", \\\"hans\\\"]\"\"\" )\r\n\r\n"},"code_codestyle":{"kind":"number","value":718,"string":"718"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ...utils import (\r\n OptionalDependencyNotAvailable,\r\n is_torch_available,\r\n is_transformers_available,\r\n is_transformers_version,\r\n)\r\n\r\n\r\ntry:\r\n\t\tif not (is_transformers_available() and is_torch_available() and is_transformers_version('>=', '4.25.0')):\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept 
OptionalDependencyNotAvailable:\r\n\t\tfrom ...utils.dummy_torch_and_transformers_objects import UnCLIPImageVariationPipeline, UnCLIPPipeline\r\nelse:\r\n\t\tfrom .pipeline_unclip import UnCLIPPipeline\r\n\t\tfrom .pipeline_unclip_image_variation import UnCLIPImageVariationPipeline\r\n\t\tfrom .text_proj import UnCLIPTextProjModel\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305168,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif not isinstance(lowercase__ ,lowercase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"check_bouncy() accepts only integer arguments\"\"\" )\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tstr(lowercase__ )\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t\"\"\"\"\"\".join(sorted(lowercase__ ) )\r\n\t\t\t\t\t\treturn sorted_str_n != str_n and sorted_str_n[::-1] != str_n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ = 9_9 ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif not 0 < percent < 1_0_0:\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"solution() only accepts values from 0 to 100\"\"\" )\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t1\r\n\r\n\t\t\t\t\t\twhile True:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif check_bouncy(lowercase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tbouncy_num += 1\r\n\t\t\t\t\t\t\t\t\t\t\t\tif (bouncy_num / num) * 1_0_0 >= percent:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn num\r\n\t\t\t\t\t\t\t\t\t\t\t\tnum += 1\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tfrom doctest import 
testmod\r\n\r\n\t\ttestmod()\r\n\t\tprint(F'''{solution(99)}''')\r\n\r\n"},"code_codestyle":{"kind":"number","value":719,"string":"719"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif p < 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"p should not be less than 2!\"\"\" )\r\n\t\t\t\t\t\telif p == 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn True\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t4\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t(1 << p) - 1\r\n\t\t\t\t\t\tfor _ in range(p - 2 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t((s * s) - 2) % m\r\n\t\t\t\t\t\treturn s == 0\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tprint(lucas_lehmer_test(7))\r\n\t\tprint(lucas_lehmer_test(11))\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305169,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport argparse\r\nimport json\r\nimport os\r\n\r\nimport evaluate\r\nimport torch\r\nfrom datasets import load_dataset\r\nfrom torch.optim import AdamW\r\nfrom torch.utils.data import DataLoader\r\nfrom transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed\r\n\r\nfrom accelerate import Accelerator, DistributedType\r\nfrom accelerate.utils.deepspeed import DummyOptim, DummyScheduler\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =16\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =32\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ = 1_6 ,SCREAMING_SNAKE_CASE__ = \"bert-base-cased\" ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tAutoTokenizer.from_pretrained(UpperCamelCase__ )\r\n lowerCAmelCase : 
Dict\t\t\t\t =\t\t\t\tload_dataset(\"\"\"glue\"\"\" ,\"\"\"mrpc\"\"\" )\r\n\r\n def tokenize_function(SCREAMING_SNAKE_CASE__ ):\r\n # max_length=None => use the model max length (it's actually the default)\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttokenizer(examples[\"\"\"sentence1\"\"\"] ,examples[\"\"\"sentence2\"\"\"] ,truncation=UpperCamelCase__ ,max_length=UpperCamelCase__ )\r\n return outputs\r\n\r\n # Apply the method we just defined to all the examples in all the splits of the dataset\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdatasets.map(\r\n UpperCamelCase__ ,batched=UpperCamelCase__ ,remove_columns=[\"\"\"idx\"\"\", \"\"\"sentence1\"\"\", \"\"\"sentence2\"\"\"] ,load_from_cache_file=UpperCamelCase__ )\r\n\r\n # We also rename the 'label' column to 'labels' which is the expected name for labels by the models of the\r\n # transformers library\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttokenized_datasets.rename_column(\"\"\"label\"\"\" ,\"\"\"labels\"\"\" )\r\n\r\n def collate_fn(SCREAMING_SNAKE_CASE__ ):\r\n # On TPU it's best to pad everything to the same length or training will be very slow.\r\n if accelerator.distributed_type == DistributedType.TPU:\r\n return tokenizer.pad(UpperCamelCase__ ,padding=\"\"\"max_length\"\"\" ,max_length=1_2_8 ,return_tensors=\"\"\"pt\"\"\" )\r\n return tokenizer.pad(UpperCamelCase__ ,padding=\"\"\"longest\"\"\" ,return_tensors=\"\"\"pt\"\"\" )\r\n\r\n # Instantiate dataloaders.\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tDataLoader(\r\n tokenized_datasets[\"\"\"train\"\"\"] ,shuffle=UpperCamelCase__ ,collate_fn=UpperCamelCase__ ,batch_size=UpperCamelCase__ )\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tDataLoader(\r\n tokenized_datasets[\"\"\"validation\"\"\"] ,shuffle=UpperCamelCase__ ,collate_fn=UpperCamelCase__ ,batch_size=UpperCamelCase__ )\r\n\r\n return train_dataloader, eval_dataloader\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ 
,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n model.eval()\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t0\r\n for step, batch in enumerate(UpperCamelCase__ ):\r\n # We could avoid this line since we set the accelerator with `device_placement=True`.\r\n batch.to(accelerator.device )\r\n with torch.no_grad():\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\tmodel(**UpperCamelCase__ )\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\toutputs.logits.argmax(dim=-1 )\r\n # It is slightly faster to call this once, than multiple times\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\taccelerator.gather(\r\n (predictions, batch[\"\"\"labels\"\"\"]) ) # If we are in a multiprocess environment, the last batch has duplicates\r\n if accelerator.use_distributed:\r\n if step == len(UpperCamelCase__ ) - 1:\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tpredictions[: len(eval_dataloader.dataset ) - samples_seen]\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\treferences[: len(eval_dataloader.dataset ) - samples_seen]\r\n else:\r\n samples_seen += references.shape[0]\r\n metric.add_batch(\r\n predictions=UpperCamelCase__ ,references=UpperCamelCase__ ,)\r\n\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tmetric.compute()\r\n return eval_metric[\"accuracy\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tAccelerator()\r\n\r\n # Sample hyper-parameters for learning rate, batch size, seed and a few other HPs\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tconfig[\"\"\"lr\"\"\"]\r\n lowerCAmelCase : Any\t\t\t\t =\t\t\t\tint(config[\"\"\"num_epochs\"\"\"] )\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\tint(config[\"\"\"seed\"\"\"] )\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tint(config[\"\"\"batch_size\"\"\"] )\r\n lowerCAmelCase : Tuple\t\t\t\t 
=\t\t\t\targs.model_name_or_path\r\n\r\n set_seed(UpperCamelCase__ )\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tget_dataloaders(UpperCamelCase__ ,UpperCamelCase__ ,UpperCamelCase__ )\r\n\r\n # Instantiate the model (we build the model here so that the seed also control new weights initialization)\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tAutoModelForSequenceClassification.from_pretrained(UpperCamelCase__ ,return_dict=UpperCamelCase__ )\r\n\r\n # Instantiate optimizer\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t(\r\n AdamW\r\n if accelerator.state.deepspeed_plugin is None\r\n or \"\"\"optimizer\"\"\" not in accelerator.state.deepspeed_plugin.deepspeed_config\r\n else DummyOptim\r\n )\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\toptimizer_cls(params=model.parameters() ,lr=UpperCamelCase__ )\r\n\r\n if accelerator.state.deepspeed_plugin is not None:\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\taccelerator.state.deepspeed_plugin.deepspeed_config[\r\n \"\"\"gradient_accumulation_steps\"\"\"\r\n ]\r\n else:\r\n lowerCAmelCase : Any\t\t\t\t =\t\t\t\t1\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t(len(UpperCamelCase__ ) * num_epochs) // gradient_accumulation_steps\r\n\r\n # Instantiate scheduler\r\n if (\r\n accelerator.state.deepspeed_plugin is None\r\n or \"scheduler\" not in accelerator.state.deepspeed_plugin.deepspeed_config\r\n ):\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tget_linear_schedule_with_warmup(\r\n optimizer=UpperCamelCase__ ,num_warmup_steps=0 ,num_training_steps=UpperCamelCase__ ,)\r\n else:\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tDummyScheduler(UpperCamelCase__ ,total_num_steps=UpperCamelCase__ ,warmup_num_steps=0 )\r\n\r\n # Prepare everything\r\n # There is no specific order to remember, we just need to unpack the objects in the same order we gave them to the\r\n # prepare method.\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\taccelerator.prepare(\r\n UpperCamelCase__ ,UpperCamelCase__ ,UpperCamelCase__ 
,UpperCamelCase__ ,UpperCamelCase__ )\r\n\r\n # We need to keep track of how many total steps we have iterated over\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t0\r\n # We also need to keep track of the stating epoch so files are named properly\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\t0\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\tevaluate.load(\"\"\"glue\"\"\" ,\"\"\"mrpc\"\"\" )\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\tnum_epochs\r\n\r\n if args.partial_train_epoch is not None:\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\targs.partial_train_epoch\r\n\r\n if args.resume_from_checkpoint:\r\n accelerator.load_state(args.resume_from_checkpoint )\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\targs.resume_from_checkpoint.split(\"\"\"epoch_\"\"\" )[1]\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t\"\"\"\"\"\"\r\n for char in epoch_string:\r\n if char.isdigit():\r\n state_epoch_num += char\r\n else:\r\n break\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tint(UpperCamelCase__ ) + 1\r\n lowerCAmelCase : Any\t\t\t\t =\t\t\t\tevaluation_loop(UpperCamelCase__ ,UpperCamelCase__ ,UpperCamelCase__ ,UpperCamelCase__ )\r\n accelerator.print(\"\"\"resumed checkpoint performance:\"\"\" ,UpperCamelCase__ )\r\n accelerator.print(\"\"\"resumed checkpoint's scheduler's lr:\"\"\" ,lr_scheduler.get_lr()[0] )\r\n accelerator.print(\"\"\"resumed optimizers's lr:\"\"\" ,optimizer.param_groups[0][\"\"\"lr\"\"\"] )\r\n with open(os.path.join(args.output_dir ,F\"\"\"state_{starting_epoch-1}.json\"\"\" ) ,\"\"\"r\"\"\" ) as f:\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tjson.load(UpperCamelCase__ )\r\n assert resumed_state[\"accuracy\"] == accuracy, \"Accuracy mismatch, loading from checkpoint failed\"\r\n assert (\r\n resumed_state[\"lr\"] == lr_scheduler.get_lr()[0]\r\n ), \"Scheduler learning rate mismatch, loading from checkpoint failed\"\r\n assert (\r\n resumed_state[\"optimizer_lr\"] == optimizer.param_groups[0][\"lr\"]\r\n ), \"Optimizer learning rate mismatch, loading from 
checkpoint failed\"\r\n assert resumed_state[\"epoch\"] == starting_epoch - 1, \"Epoch mismatch, loading from checkpoint failed\"\r\n return\r\n\r\n # Now we train the model\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\t{}\r\n for epoch in range(UpperCamelCase__ ,UpperCamelCase__ ):\r\n model.train()\r\n for step, batch in enumerate(UpperCamelCase__ ):\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tmodel(**UpperCamelCase__ )\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\toutputs.loss\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tloss / gradient_accumulation_steps\r\n accelerator.backward(UpperCamelCase__ )\r\n if step % gradient_accumulation_steps == 0:\r\n optimizer.step()\r\n lr_scheduler.step()\r\n optimizer.zero_grad()\r\n\r\n overall_step += 1\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tF\"\"\"epoch_{epoch}\"\"\"\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tos.path.join(args.output_dir ,UpperCamelCase__ )\r\n accelerator.save_state(UpperCamelCase__ )\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tevaluation_loop(UpperCamelCase__ ,UpperCamelCase__ ,UpperCamelCase__ ,UpperCamelCase__ )\r\n lowerCAmelCase : Any\t\t\t\t =\t\t\t\taccuracy\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tlr_scheduler.get_lr()[0]\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\toptimizer.param_groups[0][\"\"\"lr\"\"\"]\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tepoch\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\toverall_step\r\n accelerator.print(F\"\"\"epoch {epoch}:\"\"\" ,UpperCamelCase__ )\r\n\r\n accelerator.wait_for_everyone()\r\n if accelerator.is_main_process:\r\n with open(os.path.join(args.output_dir ,F\"\"\"state_{epoch}.json\"\"\" ) ,\"\"\"w\"\"\" ) as f:\r\n json.dump(UpperCamelCase__ ,UpperCamelCase__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\targparse.ArgumentParser(description=\"\"\"Simple example of training script tracking peak GPU 
memory usage.\"\"\" )\r\n parser.add_argument(\r\n \"\"\"--model_name_or_path\"\"\" ,type=UpperCamelCase__ ,default=\"\"\"bert-base-cased\"\"\" ,help=\"\"\"Path to pretrained model or model identifier from huggingface.co/models.\"\"\" ,required=UpperCamelCase__ ,)\r\n parser.add_argument(\r\n \"\"\"--output_dir\"\"\" ,type=UpperCamelCase__ ,default=\"\"\".\"\"\" ,help=\"\"\"Optional save directory where all checkpoint folders will be stored. Default is the current working directory.\"\"\" ,)\r\n parser.add_argument(\r\n \"\"\"--resume_from_checkpoint\"\"\" ,type=UpperCamelCase__ ,default=UpperCamelCase__ ,help=\"\"\"If the training should continue from a checkpoint folder.\"\"\" ,)\r\n parser.add_argument(\r\n \"\"\"--partial_train_epoch\"\"\" ,type=UpperCamelCase__ ,default=UpperCamelCase__ ,help=\"\"\"If passed, the training will stop after this number of epochs.\"\"\" ,)\r\n parser.add_argument(\r\n \"\"\"--num_epochs\"\"\" ,type=UpperCamelCase__ ,default=2 ,help=\"\"\"Number of train epochs.\"\"\" ,)\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tparser.parse_args()\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\t{\"\"\"lr\"\"\": 2e-5, \"\"\"num_epochs\"\"\": args.num_epochs, \"\"\"seed\"\"\": 4_2, \"\"\"batch_size\"\"\": 1_6}\r\n\r\n training_function(UpperCamelCase__ ,UpperCamelCase__ )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n main()\r\n\r\n"},"code_codestyle":{"kind":"number","value":720,"string":"720"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport random\r\nimport unittest\r\n\r\nimport torch\r\n\r\nfrom diffusers import IFImgaImgSuperResolutionPipeline\r\nfrom diffusers.utils import floats_tensor\r\nfrom diffusers.utils.import_utils import is_xformers_available\r\nfrom diffusers.utils.testing_utils import skip_mps, torch_device\r\n\r\nfrom ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS\r\nfrom ..test_pipelines_common import PipelineTesterMixin\r\nfrom . 
import IFPipelineTesterMixin\r\n\r\n\r\n\r\n\r\n\r\n\r\n@skip_mps\r\nclass _a (\t\t\tsnake_case_ , snake_case_ , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: Optional[Any] =\t\tIFImgaImgSuperResolutionPipeline\r\n\t\t\t_UpperCamelCase: int =\t\tTEXT_GUIDED_IMAGE_VARIATION_PARAMS - {\"width\", \"height\"}\r\n\t\t\t_UpperCamelCase: Optional[int] =\t\tTEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS.union({\"original_image\"} )\r\n\t\t\t_UpperCamelCase: List[str] =\t\tPipelineTesterMixin.required_optional_params - {\"latents\"}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self._get_superresolution_dummy_components()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=0 ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tif str(lowercase_ ).startswith(\"\"\"mps\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttorch.manual_seed(lowercase_ )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttorch.Generator(device=lowercase_ ).manual_seed(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tfloats_tensor((1, 3, 32, 32)\t\t\t\t\t\t, rng=random.Random(lowercase_ ) ).to(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tfloats_tensor((1, 3, 16, 16)\t\t\t\t\t\t, rng=random.Random(lowercase_ ) ).to(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t \"\"\"prompt\"\"\": \"\"\"A painting of a squirrel eating a burger\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"image\"\"\": image,\r\n\t\t\t\t\t\t\t\t\t \"\"\"original_image\"\"\": original_image,\r\n\t\t\t\t\t\t\t\t\t \"\"\"generator\"\"\": generator,\r\n\t\t\t\t\t\t\t\t\t \"\"\"num_inference_steps\"\"\": 2,\r\n\t\t\t\t\t\t\t\t\t \"\"\"output_type\"\"\": 
\"\"\"numpy\"\"\",\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\t\t\treturn inputs\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skipIf(\r\n\t\t\t torch_device != \"\"\"cuda\"\"\" or not is_xformers_available()\t\t\t\t\t\t, reason=\"\"\"XFormers attention is only available with CUDA and `xformers` installed\"\"\"\t\t\t\t\t\t, )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tself._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tself._test_save_load_optional_components()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skipIf(torch_device != \"\"\"cuda\"\"\"\t\t\t\t\t\t, reason=\"\"\"float16 requires CUDA\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\t# Due to non-determinism in save load of the hf-internal-testing/tiny-random-t5 text encoder\r\n\t\t\t\t\t\t\t\t\tsuper().test_save_load_floataa(expected_max_diff=1e-1 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tself._test_attention_slicing_forward_pass(expected_max_diff=1e-2 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tself._test_save_load_local()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tself._test_inference_batch_single_identical(\r\n\t\t\t\t\t\t\t\t\t expected_max_diff=1e-2\t\t\t\t\t\t, )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305170,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport os\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n with open(os.path.dirname(_UpperCamelCase ) + \"\"\"/grid.txt\"\"\" ) as f:\r\n lowerCAmelCase : Any\t\t\t\t 
=\t\t\t\t[] # noqa: E741\r\n for _ in range(2_0 ):\r\n l.append([int(_UpperCamelCase ) for x in f.readline().split()] )\r\n\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t0\r\n\r\n # right\r\n for i in range(2_0 ):\r\n for j in range(1_7 ):\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tl[i][j] * l[i][j + 1] * l[i][j + 2] * l[i][j + 3]\r\n if temp > maximum:\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttemp\r\n\r\n # down\r\n for i in range(1_7 ):\r\n for j in range(2_0 ):\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tl[i][j] * l[i + 1][j] * l[i + 2][j] * l[i + 3][j]\r\n if temp > maximum:\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttemp\r\n\r\n # diagonal 1\r\n for i in range(1_7 ):\r\n for j in range(1_7 ):\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\tl[i][j] * l[i + 1][j + 1] * l[i + 2][j + 2] * l[i + 3][j + 3]\r\n if temp > maximum:\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttemp\r\n\r\n # diagonal 2\r\n for i in range(1_7 ):\r\n for j in range(3 ,2_0 ):\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tl[i][j] * l[i + 1][j - 1] * l[i + 2][j - 2] * l[i + 3][j - 3]\r\n if temp > maximum:\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttemp\r\n return maximum\r\n\r\n\r\nif __name__ == \"__main__\":\r\n print(solution())\r\n\r\n"},"code_codestyle":{"kind":"number","value":721,"string":"721"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...utils import logging\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: Tuple =\t\t\"llama\"\r\n\t\t\t_UpperCamelCase: List[str] =\t\t[\"past_key_values\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=32000\t\t\t\t\t\t, lowercase_=4096\t\t\t\t\t\t, lowercase_=11008\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, 
lowercase_=None\t\t\t\t\t\t, lowercase_=\"silu\"\t\t\t\t\t\t, lowercase_=2048\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-6\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tvocab_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tmax_position_embeddings\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\thidden_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tintermediate_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnum_hidden_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_attention_heads\r\n\r\n\t\t\t\t\t\t\t\t\t# for backward compatibility\r\n\t\t\t\t\t\t\t\t\tif num_key_value_heads is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_attention_heads\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_key_value_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\thidden_act\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tinitializer_range\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\trms_norm_eps\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tpretraining_tp\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tuse_cache\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\trope_scaling\r\n\t\t\t\t\t\t\t\t\tself._rope_scaling_validation()\r\n\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(\r\n\t\t\t\t\t\t\t\t\t pad_token_id=lowercase_\t\t\t\t\t\t, bos_token_id=lowercase_\t\t\t\t\t\t, eos_token_id=lowercase_\t\t\t\t\t\t, tie_word_embeddings=lowercase_\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tif self.rope_scaling is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\r\n\t\t\t\t\t\t\t\t\tif not isinstance(self.rope_scaling\t\t\t\t\t\t, lowercase_ ) or len(self.rope_scaling ) != 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"`rope_scaling` must be a dictionary with with two fields, `name` and `factor`, \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t f\"\"\"got {self.rope_scaling}\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.rope_scaling.get(\"\"\"type\"\"\"\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.rope_scaling.get(\"\"\"factor\"\"\"\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tif rope_scaling_type is None or rope_scaling_type not in [\"linear\", \"dynamic\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t f\"\"\"`rope_scaling`'s name field must be one of ['linear', 'dynamic'], got {rope_scaling_type}\"\"\" )\r\n\t\t\t\t\t\t\t\t\tif rope_scaling_factor is None or not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ) or rope_scaling_factor <= 1.0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(f\"\"\"`rope_scaling`'s factor field must be an float > 1, got {rope_scaling_factor}\"\"\" )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305171,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\n'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\r\nimport argparse\r\nimport json\r\nfrom collections import OrderedDict\r\nfrom functools import partial\r\nfrom pathlib import Path\r\n\r\nimport timm\r\nimport torch\r\nfrom huggingface_hub import hf_hub_download\r\n\r\nfrom transformers import LevitConfig, LevitForImageClassificationWithTeacher, LevitImageProcessor\r\nfrom transformers.utils import 
logging\r\n\r\n\r\nlogging.set_verbosity_info()\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =logging.get_logger()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ = True ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tprint(F\"\"\"Converting {name}...\"\"\" )\r\n\r\n\t\t\t\t\t\twith torch.no_grad():\r\n\t\t\t\t\t\t\t\t\t\t\t\tif hidden_sizes == 1_2_8:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif name[-1] == \"S\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttimm.create_model(\"\"\"levit_128s\"\"\" ,pretrained=SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttimm.create_model(\"\"\"levit_128\"\"\" ,pretrained=SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\tif hidden_sizes == 1_9_2:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttimm.create_model(\"\"\"levit_192\"\"\" ,pretrained=SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\tif hidden_sizes == 2_5_6:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttimm.create_model(\"\"\"levit_256\"\"\" ,pretrained=SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\tif hidden_sizes == 3_8_4:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttimm.create_model(\"\"\"levit_384\"\"\" ,pretrained=SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tfrom_model.eval()\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tLevitForImageClassificationWithTeacher(SCREAMING_SNAKE_CASE__ ).eval()\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tOrderedDict()\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t 
=\t\t\t\tfrom_model.state_dict()\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tlist(from_model.state_dict().keys() )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tlist(our_model.state_dict().keys() )\r\n\t\t\t\t\t\t\t\t\t\t\t\tprint(len(SCREAMING_SNAKE_CASE__ ) ,len(SCREAMING_SNAKE_CASE__ ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor i in range(len(SCREAMING_SNAKE_CASE__ ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tweights[og_keys[i]]\r\n\t\t\t\t\t\t\t\t\t\t\t\tour_model.load_state_dict(SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttorch.randn((2, 3, 2_2_4, 2_2_4) )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tfrom_model(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tour_model(SCREAMING_SNAKE_CASE__ ).logits\r\n\r\n\t\t\t\t\t\tassert torch.allclose(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ), \"The model logits don't match the original one.\"\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tname\r\n\t\t\t\t\t\tprint(SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\tif push_to_hub:\r\n\t\t\t\t\t\t\t\t\t\t\t\tour_model.save_pretrained(save_directory / checkpoint_name )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tLevitImageProcessor()\r\n\t\t\t\t\t\t\t\t\t\t\t\timage_processor.save_pretrained(save_directory / checkpoint_name )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Pushed {checkpoint_name}\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ = None ,SCREAMING_SNAKE_CASE__ = True ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t\"\"\"imagenet-1k-id2label.json\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t1_0_0_0\r\n\t\t\t\t\t\tlowerCAmelCase : 
Optional[Any]\t\t\t\t =\t\t\t\t(1, num_labels)\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t\"\"\"huggingface/label-files\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tnum_labels\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tjson.load(open(hf_hub_download(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,repo_type=\"\"\"dataset\"\"\" ) ,\"\"\"r\"\"\" ) )\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{int(SCREAMING_SNAKE_CASE__ ): v for k, v in idalabel.items()}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tidalabel\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{v: k for k, v in idalabel.items()}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tpartial(SCREAMING_SNAKE_CASE__ ,num_labels=SCREAMING_SNAKE_CASE__ ,idalabel=SCREAMING_SNAKE_CASE__ ,labelaid=SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t \"\"\"levit-128S\"\"\": 1_2_8,\r\n\t\t\t\t\t\t \"\"\"levit-128\"\"\": 1_2_8,\r\n\t\t\t\t\t\t \"\"\"levit-192\"\"\": 1_9_2,\r\n\t\t\t\t\t\t \"\"\"levit-256\"\"\": 2_5_6,\r\n\t\t\t\t\t\t \"\"\"levit-384\"\"\": 3_8_4,\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t \"\"\"levit-128S\"\"\": ImageNetPreTrainedConfig(\r\n\t\t\t\t\t\t hidden_sizes=[1_2_8, 2_5_6, 3_8_4] ,num_attention_heads=[4, 6, 8] ,depths=[2, 3, 4] ,key_dim=[1_6, 1_6, 1_6] ,drop_path_rate=0 ,),\r\n\t\t\t\t\t\t \"\"\"levit-128\"\"\": ImageNetPreTrainedConfig(\r\n\t\t\t\t\t\t hidden_sizes=[1_2_8, 2_5_6, 3_8_4] ,num_attention_heads=[4, 8, 1_2] ,depths=[4, 4, 4] ,key_dim=[1_6, 1_6, 1_6] ,drop_path_rate=0 ,),\r\n\t\t\t\t\t\t \"\"\"levit-192\"\"\": ImageNetPreTrainedConfig(\r\n\t\t\t\t\t\t hidden_sizes=[1_9_2, 2_8_8, 3_8_4] ,num_attention_heads=[3, 5, 6] ,depths=[4, 4, 4] ,key_dim=[3_2, 3_2, 3_2] ,drop_path_rate=0 ,),\r\n\t\t\t\t\t\t \"\"\"levit-256\"\"\": ImageNetPreTrainedConfig(\r\n\t\t\t\t\t\t 
hidden_sizes=[2_5_6, 3_8_4, 5_1_2] ,num_attention_heads=[4, 6, 8] ,depths=[4, 4, 4] ,key_dim=[3_2, 3_2, 3_2] ,drop_path_rate=0 ,),\r\n\t\t\t\t\t\t \"\"\"levit-384\"\"\": ImageNetPreTrainedConfig(\r\n\t\t\t\t\t\t hidden_sizes=[3_8_4, 5_1_2, 7_6_8] ,num_attention_heads=[6, 9, 1_2] ,depths=[4, 4, 4] ,key_dim=[3_2, 3_2, 3_2] ,drop_path_rate=0.1 ,),\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tif model_name:\r\n\t\t\t\t\t\t\t\t\t\t\t\tconvert_weight_and_push(\r\n\t\t\t\t\t\t\t\t\t\t\t\t names_to_hidden_sizes[model_name] ,SCREAMING_SNAKE_CASE__ ,names_to_config[model_name] ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor model_name, config in names_to_config.items():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tconvert_weight_and_push(names_to_hidden_sizes[model_name] ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\treturn config, expected_shape\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =argparse.ArgumentParser()\r\n\t\t# Required parameters\r\n\t\tparser.add_argument(\r\n\t\t '--model_name',\r\n\t\t default=None,\r\n\t\t type=str,\r\n\t\t help='The name of the model you wish to convert, it must be one of the supported Levit* architecture,',\r\n\t\t)\r\n\t\tparser.add_argument(\r\n\t\t '--pytorch_dump_folder_path',\r\n\t\t default='levit-dump-folder/',\r\n\t\t type=Path,\r\n\t\t required=False,\r\n\t\t help='Path to the output PyTorch model directory.',\r\n\t\t)\r\n\t\tparser.add_argument('--push_to_hub', action='store_true', help='Push model and image processor to the hub')\r\n\t\tparser.add_argument(\r\n\t\t '--no-push_to_hub',\r\n\t\t dest='push_to_hub',\r\n\t\t action='store_false',\r\n\t\t help='Do not push model and image processor to the hub',\r\n\t\t)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str] =parser.parse_args()\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] 
=args.pytorch_dump_folder_path\r\n\t\tpytorch_dump_folder_path.mkdir(exist_ok=True, parents=True)\r\n\t\tconvert_weights_and_push(pytorch_dump_folder_path, args.model_name, args.push_to_hub)\r\n\r\n"},"code_codestyle":{"kind":"number","value":700,"string":"700"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom collections import OrderedDict\r\nfrom typing import Mapping\r\n\r\nfrom packaging import version\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...onnx import OnnxConfig\r\nfrom ...utils import logging\r\nfrom ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={\r\n 'microsoft/swin-tiny-patch4-window7-224': (\r\n 'https://huggingface.co/microsoft/swin-tiny-patch4-window7-224/resolve/main/config.json'\r\n ),\r\n # See all Swin models at https://huggingface.co/models?filter=swin\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ , snake_case_ ):\r\n\t\t\t_UpperCamelCase: int =\t\t\"swin\"\r\n\r\n\t\t\t_UpperCamelCase: str =\t\t{\r\n\t\t\t \"num_attention_heads\": \"num_heads\",\r\n\t\t\t \"num_hidden_layers\": \"num_layers\",\r\n\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=224\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=96\t\t\t\t\t\t, lowercase_=[2, 2, 6, 2]\t\t\t\t\t\t, lowercase_=[3, 6, 12, 24]\t\t\t\t\t\t, lowercase_=7\t\t\t\t\t\t, lowercase_=4.0\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, lowercase_=\"gelu\"\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-5\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) 
->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(**lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\timage_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tpatch_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tnum_channels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tembed_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tdepths\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tlen(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\twindow_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tmlp_ratio\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tqkv_bias\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\thidden_dropout_prob\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tattention_probs_dropout_prob\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tdrop_path_rate\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\thidden_act\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tuse_absolute_embeddings\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tlayer_norm_eps\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tinitializer_range\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tencoder_stride\r\n\t\t\t\t\t\t\t\t\t# we set the hidden_size attribute in order to make Swin work with VisionEncoderDecoderModel\r\n\t\t\t\t\t\t\t\t\t# this indicates the channel dimension after the last stage of the model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tint(embed_dim * 2 ** (len(lowercase_ ) - 1) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[\"\"\"stem\"\"\"] + [f\"\"\"stage{idx}\"\"\" for idx in range(1\t\t\t\t\t\t, len(lowercase_ ) + 1 )]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : 
Optional[Any]\t\t\t\t =\t\t\t\tget_aligned_output_features_output_indices(\r\n\t\t\t\t\t\t\t\t\t out_features=lowercase_\t\t\t\t\t\t, out_indices=lowercase_\t\t\t\t\t\t, stage_names=self.stage_names )\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: int =\t\tversion.parse(\"1.11\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tMapping[str, Mapping[int, str]]:\r\n\t\t\t\t\t\t\t\t\treturn OrderedDict(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t (\"\"\"pixel_values\"\"\", {0: \"\"\"batch\"\"\", 1: \"\"\"num_channels\"\"\", 2: \"\"\"height\"\"\", 3: \"\"\"width\"\"\"}),\r\n\t\t\t\t\t\t\t\t\t ] )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tfloat:\r\n\t\t\t\t\t\t\t\t\treturn 1e-4\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305172,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport unittest\r\n\r\nfrom transformers import GPTSwaTokenizer\r\nfrom transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow\r\n\r\nfrom ...test_tokenization_common import TokenizerTesterMixin\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny = get_tests_dir('fixtures/test_sentencepiece_with_bytefallback.model')\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_sentencepiece\r\n@require_tokenizers\r\nclass _a (\t\t\tsnake_case_ , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: List[Any] =\t\tGPTSwaTokenizer\r\n\t\t\t_UpperCamelCase: str =\t\tFalse\r\n\t\t\t_UpperCamelCase: List[str] =\t\tTrue\r\n\t\t\t_UpperCamelCase: List[str] =\t\tFalse\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tsuper().setUp()\r\n\r\n\t\t\t\t\t\t\t\t\t# We have a SentencePiece fixture for testing\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tGPTSwaTokenizer(lowercase_\t\t\t\t\t\t, eos_token=\"\"\"\"\"\"\t\t\t\t\t\t, bos_token=\"\"\"\"\"\"\t\t\t\t\t\t, pad_token=\"\"\"\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\ttokenizer.save_pretrained(self.tmpdirname )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t\"\"\"This is a test\"\"\"\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t\"\"\"This is a test\"\"\"\r\n\t\t\t\t\t\t\t\t\treturn input_text, output_text\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t\"\"\"\"\"\"\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t1\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(self.get_tokenizer()._convert_token_to_id(lowercase_ )\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(self.get_tokenizer()._convert_id_to_token(lowercase_ )\t\t\t\t\t\t, lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tlist(self.get_tokenizer().get_vocab().keys() )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(vocab_keys[0]\t\t\t\t\t\t, \"\"\"\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(vocab_keys[1]\t\t\t\t\t\t, \"\"\"\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(vocab_keys[-1]\t\t\t\t\t\t, \"\"\"j\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(len(lowercase_ )\t\t\t\t\t\t, 2000 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(self.get_tokenizer().vocab_size\t\t\t\t\t\t, 2000 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tGPTSwaTokenizer(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t 
=\t\t\t\ttokenizer.tokenize(\"\"\"This is a test\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertListEqual(lowercase_\t\t\t\t\t\t, [\"\"\"▁This\"\"\", \"\"\"▁is\"\"\", \"\"\"▁a\"\"\", \"\"\"▁t\"\"\", \"\"\"est\"\"\"] )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertListEqual(tokenizer.convert_tokens_to_ids(lowercase_ )\t\t\t\t\t\t, [465, 287, 265, 631, 842] )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttokenizer.tokenize(\"\"\"I was born in 92000, and this is falsé.\"\"\" )\r\n\t\t\t\t\t\t\t\t\t# fmt: off\r\n\t\t\t\t\t\t\t\t\tself.assertListEqual(\r\n\t\t\t\t\t\t\t\t\t lowercase_\t\t\t\t\t\t, [\"\"\"▁I\"\"\", \"\"\"▁was\"\"\", \"\"\"▁bor\"\"\", \"\"\"n\"\"\", \"\"\"▁in\"\"\", \"\"\"▁\"\"\", \"\"\"<0x39>\"\"\", \"\"\"2\"\"\", \"\"\"0\"\"\", \"\"\"0\"\"\", \"\"\"0\"\"\", \"\"\",\"\"\", \"\"\"▁and\"\"\", \"\"\"▁this\"\"\", \"\"\"▁is\"\"\", \"\"\"▁f\"\"\", \"\"\"al\"\"\", \"\"\"s\"\"\", \"\"\"<0xC3>\"\"\", \"\"\"<0xA9>\"\"\", \"\"\".\"\"\"]\t\t\t\t\t\t, )\r\n\t\t\t\t\t\t\t\t\t# fmt: on\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\ttokenizer.convert_tokens_to_ids(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tself.assertListEqual(\r\n\t\t\t\t\t\t\t\t\t lowercase_\t\t\t\t\t\t, [262, 272, 1525, 286, 271, 268, 60, 916, 633, 633, 633, 259, 266, 301, 287, 384, 367, 263, 198, 172, 260]\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttokenizer.convert_ids_to_tokens(lowercase_ )\r\n\t\t\t\t\t\t\t\t\t# fmt: off\r\n\t\t\t\t\t\t\t\t\tself.assertListEqual(\r\n\t\t\t\t\t\t\t\t\t lowercase_\t\t\t\t\t\t, [\"\"\"▁I\"\"\", \"\"\"▁was\"\"\", \"\"\"▁bor\"\"\", \"\"\"n\"\"\", \"\"\"▁in\"\"\", \"\"\"▁\"\"\", \"\"\"<0x39>\"\"\", \"\"\"2\"\"\", \"\"\"0\"\"\", \"\"\"0\"\"\", \"\"\"0\"\"\", \"\"\",\"\"\", \"\"\"▁and\"\"\", \"\"\"▁this\"\"\", \"\"\"▁is\"\"\", \"\"\"▁f\"\"\", \"\"\"al\"\"\", \"\"\"s\"\"\", \"\"\"<0xC3>\"\"\", \"\"\"<0xA9>\"\"\", \"\"\".\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t# fmt: on\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case 
(\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tGPTSwaTokenizer(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t[\"\"\"This is a test\"\"\", \"\"\"I was born in 92000, and this is falsé.\"\"\"]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t[\r\n\t\t\t\t\t\t\t\t\t [465, 287, 265, 631, 842],\r\n\t\t\t\t\t\t\t\t\t [262, 272, 1525, 286, 271, 268, 60, 916, 633, 633, 633, 259, 266, 301, 287, 384, 367, 263, 198, 172, 260],\r\n\t\t\t\t\t\t\t\t\t]\r\n\r\n\t\t\t\t\t\t\t\t\t# Test that encode_fast returns the same as tokenize + convert_tokens_to_ids\r\n\t\t\t\t\t\t\t\t\tfor text, expected_ids in zip(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertListEqual(tokenizer.encode_fast(lowercase_ )\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test that decode_fast returns the input text\r\n\t\t\t\t\t\t\t\t\tfor text, token_ids in zip(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(tokenizer.decode_fast(lowercase_ )\t\t\t\t\t\t, lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@slow\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[\r\n\t\t\t\t\t\t\t\t\t \"\"\"<|python|>def fibonacci(n)\\n if n < 0:\\n print(\\'Incorrect input\\')\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"Hey there, how are you doing this fine day?\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"This is a text with a trailing spaces followed by a dot .\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"Häj sväjs lillebrör! =)\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"Det är inget fel på Mr. 
Cool\"\"\",\r\n\t\t\t\t\t\t\t\t\t]\r\n\r\n\t\t\t\t\t\t\t\t\t# fmt: off\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t{\"\"\"input_ids\"\"\": [[63423, 5, 6811, 14954, 282, 816, 3821, 63466, 63425, 63462, 18, 63978, 678, 301, 1320, 63423, 63455, 63458, 18, 63982, 4246, 3940, 1901, 47789, 5547, 18994], [19630, 1100, 63446, 1342, 633, 544, 4488, 593, 5102, 2416, 63495, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1652, 428, 268, 1936, 515, 268, 58593, 22413, 9106, 546, 268, 33213, 63979, 698, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [55130, 63450, 924, 63449, 2249, 4062, 1558, 318, 63504, 21498, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [509, 377, 2827, 2559, 332, 6575, 63443, 26801, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], \"\"\"token_type_ids\"\"\": [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], \"\"\"attention_mask\"\"\": [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]}\r\n\t\t\t\t\t\t\t\t\t# fmt: on\r\n\t\t\t\t\t\t\t\t\tself.tokenizer_integration_test_util(\r\n\t\t\t\t\t\t\t\t\t expected_encoding=lowercase_\t\t\t\t\t\t, model_name=\"\"\"AI-Sweden/gpt-sw3-126m\"\"\"\t\t\t\t\t\t, sequences=lowercase_\t\t\t\t\t\t, 
)\r\n\r\n"},"code_codestyle":{"kind":"number","value":701,"string":"701"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr ={\r\n 'Pillow': 'Pillow<10.0.0',\r\n 'accelerate': 'accelerate>=0.20.3',\r\n 'av': 'av==9.2.0',\r\n 'beautifulsoup4': 'beautifulsoup4',\r\n 'black': 'black~=23.1',\r\n 'codecarbon': 'codecarbon==1.2.0',\r\n 'cookiecutter': 'cookiecutter==1.7.3',\r\n 'dataclasses': 'dataclasses',\r\n 'datasets': 'datasets!=2.5.0',\r\n 'decord': 'decord==0.6.0',\r\n 'deepspeed': 'deepspeed>=0.9.3',\r\n 'diffusers': 'diffusers',\r\n 'dill': 'dill<0.3.5',\r\n 'evaluate': 'evaluate>=0.2.0',\r\n 'fairscale': 'fairscale>0.3',\r\n 'faiss-cpu': 'faiss-cpu',\r\n 'fastapi': 'fastapi',\r\n 'filelock': 'filelock',\r\n 'flax': 'flax>=0.4.1,<=0.7.0',\r\n 'ftfy': 'ftfy',\r\n 'fugashi': 'fugashi>=1.0',\r\n 'GitPython': 'GitPython<3.1.19',\r\n 'hf-doc-builder': 'hf-doc-builder>=0.3.0',\r\n 'huggingface-hub': 'huggingface-hub>=0.14.1,<1.0',\r\n 'importlib_metadata': 'importlib_metadata',\r\n 'ipadic': 'ipadic>=1.0.0,<2.0',\r\n 'isort': 'isort>=5.5.4',\r\n 'jax': 'jax>=0.2.8,!=0.3.2,<=0.4.13',\r\n 'jaxlib': 'jaxlib>=0.1.65,<=0.4.13',\r\n 'jieba': 'jieba',\r\n 'kenlm': 'kenlm',\r\n 'keras-nlp': 'keras-nlp>=0.3.1',\r\n 'librosa': 'librosa',\r\n 'nltk': 'nltk',\r\n 'natten': 'natten>=0.14.6',\r\n 'numpy': 'numpy>=1.17',\r\n 'onnxconverter-common': 'onnxconverter-common',\r\n 'onnxruntime-tools': 'onnxruntime-tools>=1.4.2',\r\n 'onnxruntime': 'onnxruntime>=1.4.0',\r\n 'opencv-python': 'opencv-python',\r\n 'optuna': 'optuna',\r\n 'optax': 'optax>=0.0.8,<=0.1.4',\r\n 'packaging': 'packaging>=20.0',\r\n 'parameterized': 'parameterized',\r\n 'phonemizer': 'phonemizer',\r\n 'protobuf': 'protobuf',\r\n 'psutil': 'psutil',\r\n 'pyyaml': 'pyyaml>=5.1',\r\n 'pydantic': 'pydantic<2',\r\n 'pytest': 'pytest>=7.2.0',\r\n 'pytest-timeout': 'pytest-timeout',\r\n 'pytest-xdist': 'pytest-xdist',\r\n 'python': 'python>=3.8.0',\r\n 'ray[tune]': 
'ray[tune]',\r\n 'regex': 'regex!=2019.12.17',\r\n 'requests': 'requests',\r\n 'rhoknp': 'rhoknp>=1.1.0,<1.3.1',\r\n 'rjieba': 'rjieba',\r\n 'rouge-score': 'rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1',\r\n 'ruff': 'ruff>=0.0.241,<=0.0.259',\r\n 'sacrebleu': 'sacrebleu>=1.4.12,<2.0.0',\r\n 'sacremoses': 'sacremoses',\r\n 'safetensors': 'safetensors>=0.3.1',\r\n 'sagemaker': 'sagemaker>=2.31.0',\r\n 'scikit-learn': 'scikit-learn',\r\n 'sentencepiece': 'sentencepiece>=0.1.91,!=0.1.92',\r\n 'sigopt': 'sigopt',\r\n 'starlette': 'starlette',\r\n 'sudachipy': 'sudachipy>=0.6.6',\r\n 'sudachidict_core': 'sudachidict_core>=20220729',\r\n 'tensorflow-cpu': 'tensorflow-cpu>=2.6,<2.14',\r\n 'tensorflow': 'tensorflow>=2.6,<2.14',\r\n 'tensorflow-text': 'tensorflow-text<2.14',\r\n 'tf2onnx': 'tf2onnx',\r\n 'timeout-decorator': 'timeout-decorator',\r\n 'timm': 'timm',\r\n 'tokenizers': 'tokenizers>=0.11.1,!=0.11.3,<0.14',\r\n 'torch': 'torch>=1.9,!=1.12.0',\r\n 'torchaudio': 'torchaudio',\r\n 'torchvision': 'torchvision',\r\n 'pyctcdecode': 'pyctcdecode>=0.4.0',\r\n 'tqdm': 'tqdm>=4.27',\r\n 'unidic': 'unidic>=1.0.2',\r\n 'unidic_lite': 'unidic_lite>=1.0.7',\r\n 'urllib3': 'urllib3<2.0.0',\r\n 'uvicorn': 'uvicorn',\r\n}\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305173,"cells":{"code":{"kind":"string","value":"\r\n\r\nimport unittest\r\n\r\nimport numpy as np\r\n\r\nfrom transformers.testing_utils import require_torch, require_vision\r\nfrom transformers.utils import is_torch_available, is_vision_available\r\n\r\nfrom ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs\r\n\r\n\r\nif is_torch_available():\r\n\t\timport torch\r\n\r\nif is_vision_available():\r\n\t\tfrom PIL import Image\r\n\r\n\t\tfrom transformers import LevitImageProcessor\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tunittest.TestCase ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef 
__init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=7\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=18\t\t\t\t\t\t, lowercase_=30\t\t\t\t\t\t, lowercase_=400\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=[0.5, 0.5, 0.5]\t\t\t\t\t\t, lowercase_=[0.5, 0.5, 0.5]\t\t\t\t\t\t, ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tsize if size is not None else {\"\"\"shortest_edge\"\"\": 18}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tcrop_size if crop_size is not None else {\"\"\"height\"\"\": 18, \"\"\"width\"\"\": 18}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tparent\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tbatch_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tnum_channels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\timage_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tmin_resolution\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tmax_resolution\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tdo_resize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tsize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tdo_center_crop\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tcrop_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdo_normalize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\timage_mean\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\timage_std\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn {\r\n\t\t\t\t\t\t\t\t\t \"image_mean\": self.image_mean,\r\n\t\t\t\t\t\t\t\t\t \"image_std\": self.image_std,\r\n\t\t\t\t\t\t\t\t\t \"do_normalize\": 
self.do_normalize,\r\n\t\t\t\t\t\t\t\t\t \"do_resize\": self.do_resize,\r\n\t\t\t\t\t\t\t\t\t \"do_center_crop\": self.do_center_crop,\r\n\t\t\t\t\t\t\t\t\t \"size\": self.size,\r\n\t\t\t\t\t\t\t\t\t \"crop_size\": self.crop_size,\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_torch\r\n@require_vision\r\nclass _a (\t\t\t__snake_case , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: List[str] =\t\tLevitImageProcessor if is_vision_available() else None\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tLevitImageProcessingTester(self )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.image_processor_tester.prepare_image_processor_dict()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(__UpperCamelCase\t\t\t\t\t\t, \"\"\"image_mean\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(__UpperCamelCase\t\t\t\t\t\t, \"\"\"image_std\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(__UpperCamelCase\t\t\t\t\t\t, \"\"\"do_normalize\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(__UpperCamelCase\t\t\t\t\t\t, \"\"\"do_resize\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(__UpperCamelCase\t\t\t\t\t\t, \"\"\"do_center_crop\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(__UpperCamelCase\t\t\t\t\t\t, \"\"\"size\"\"\" ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tself.image_processing_class.from_dict(self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.size\t\t\t\t\t\t, {\"\"\"shortest_edge\"\"\": 18} 
)\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.crop_size\t\t\t\t\t\t, {\"\"\"height\"\"\": 18, \"\"\"width\"\"\": 18} )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.image_processing_class.from_dict(self.image_processor_dict\t\t\t\t\t\t, size=42\t\t\t\t\t\t, crop_size=84 )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.size\t\t\t\t\t\t, {\"\"\"shortest_edge\"\"\": 42} )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.crop_size\t\t\t\t\t\t, {\"\"\"height\"\"\": 84, \"\"\"width\"\"\": 84} )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\t# create random PIL images\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tprepare_image_inputs(self.image_processor_tester\t\t\t\t\t\t, equal_resolution=__UpperCamelCase )\r\n\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(__UpperCamelCase\t\t\t\t\t\t, Image.Image )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test not batched input\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\timage_processing(image_inputs[0]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t 1,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"height\"\"\"],\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"width\"\"\"],\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test batched\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\timage_processing(__UpperCamelCase\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" 
).pixel_values\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.batch_size,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"height\"\"\"],\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"width\"\"\"],\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\t# create random numpy tensors\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tprepare_image_inputs(self.image_processor_tester\t\t\t\t\t\t, equal_resolution=__UpperCamelCase\t\t\t\t\t\t, numpify=__UpperCamelCase )\r\n\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(__UpperCamelCase\t\t\t\t\t\t, np.ndarray )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test not batched input\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\timage_processing(image_inputs[0]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t 1,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"height\"\"\"],\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"width\"\"\"],\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test batched\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\timage_processing(__UpperCamelCase\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t 
self.image_processor_tester.batch_size,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"height\"\"\"],\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"width\"\"\"],\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\t# create random PyTorch tensors\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tprepare_image_inputs(self.image_processor_tester\t\t\t\t\t\t, equal_resolution=__UpperCamelCase\t\t\t\t\t\t, torchify=__UpperCamelCase )\r\n\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(__UpperCamelCase\t\t\t\t\t\t, torch.Tensor )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test not batched input\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\timage_processing(image_inputs[0]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t 1,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"height\"\"\"],\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"width\"\"\"],\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test batched\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\timage_processing(__UpperCamelCase\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.batch_size,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t 
self.image_processor_tester.crop_size[\"\"\"height\"\"\"],\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.crop_size[\"\"\"width\"\"\"],\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n"},"code_codestyle":{"kind":"number","value":702,"string":"702"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import TYPE_CHECKING\r\n\r\nfrom ...utils import (\r\n OptionalDependencyNotAvailable,\r\n _LazyModule,\r\n is_flax_available,\r\n is_tf_available,\r\n is_tokenizers_available,\r\n is_torch_available,\r\n)\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] ={\r\n 'configuration_roformer': ['ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'RoFormerConfig', 'RoFormerOnnxConfig'],\r\n 'tokenization_roformer': ['RoFormerTokenizer'],\r\n}\r\n\r\ntry:\r\n\t\tif not is_tokenizers_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =['RoFormerTokenizerFast']\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =[\r\n\t\t 'ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'RoFormerForCausalLM',\r\n\t\t 'RoFormerForMaskedLM',\r\n\t\t 'RoFormerForMultipleChoice',\r\n\t\t 'RoFormerForQuestionAnswering',\r\n\t\t 'RoFormerForSequenceClassification',\r\n\t\t 'RoFormerForTokenClassification',\r\n\t\t 'RoFormerLayer',\r\n\t\t 'RoFormerModel',\r\n\t\t 'RoFormerPreTrainedModel',\r\n\t\t 'load_tf_weights_in_roformer',\r\n\t\t]\r\n\r\n\r\ntry:\r\n\t\tif not is_tf_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =[\r\n\t\t 'TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'TFRoFormerForCausalLM',\r\n\t\t 'TFRoFormerForMaskedLM',\r\n\t\t 
'TFRoFormerForMultipleChoice',\r\n\t\t 'TFRoFormerForQuestionAnswering',\r\n\t\t 'TFRoFormerForSequenceClassification',\r\n\t\t 'TFRoFormerForTokenClassification',\r\n\t\t 'TFRoFormerLayer',\r\n\t\t 'TFRoFormerModel',\r\n\t\t 'TFRoFormerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\ntry:\r\n\t\tif not is_flax_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =[\r\n\t\t 'FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'FlaxRoFormerForMaskedLM',\r\n\t\t 'FlaxRoFormerForMultipleChoice',\r\n\t\t 'FlaxRoFormerForQuestionAnswering',\r\n\t\t 'FlaxRoFormerForSequenceClassification',\r\n\t\t 'FlaxRoFormerForTokenClassification',\r\n\t\t 'FlaxRoFormerModel',\r\n\t\t 'FlaxRoFormerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, RoFormerConfig, RoFormerOnnxConfig\r\n\t\tfrom .tokenization_roformer import RoFormerTokenizer\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_tokenizers_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .tokenization_roformer_fast import RoFormerTokenizerFast\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_roformer import (\r\n\t\t\t\t ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t RoFormerForCausalLM,\r\n\t\t\t\t RoFormerForMaskedLM,\r\n\t\t\t\t RoFormerForMultipleChoice,\r\n\t\t\t\t RoFormerForQuestionAnswering,\r\n\t\t\t\t RoFormerForSequenceClassification,\r\n\t\t\t\t RoFormerForTokenClassification,\r\n\t\t\t\t RoFormerLayer,\r\n\t\t\t\t RoFormerModel,\r\n\t\t\t\t RoFormerPreTrainedModel,\r\n\t\t\t\t load_tf_weights_in_roformer,\r\n\t\t\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not 
is_tf_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_tf_roformer import (\r\n\t\t\t\t TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t TFRoFormerForCausalLM,\r\n\t\t\t\t TFRoFormerForMaskedLM,\r\n\t\t\t\t TFRoFormerForMultipleChoice,\r\n\t\t\t\t TFRoFormerForQuestionAnswering,\r\n\t\t\t\t TFRoFormerForSequenceClassification,\r\n\t\t\t\t TFRoFormerForTokenClassification,\r\n\t\t\t\t TFRoFormerLayer,\r\n\t\t\t\t TFRoFormerModel,\r\n\t\t\t\t TFRoFormerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_flax_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_flax_roformer import (\r\n\t\t\t\t FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t FlaxRoFormerForMaskedLM,\r\n\t\t\t\t FlaxRoFormerForMultipleChoice,\r\n\t\t\t\t FlaxRoFormerForQuestionAnswering,\r\n\t\t\t\t FlaxRoFormerForSequenceClassification,\r\n\t\t\t\t FlaxRoFormerForTokenClassification,\r\n\t\t\t\t FlaxRoFormerModel,\r\n\t\t\t\t FlaxRoFormerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =_LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305174,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom collections.abc import Generator\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t0, 1\r\n\t\t\t\t\t\twhile True:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tb, a + b\r\n\t\t\t\t\t\t\t\t\t\t\t\tyield 
b\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ = 1_0_0_0 ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t1\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tfibonacci_generator()\r\n\t\t\t\t\t\twhile len(str(next(lowerCAmelCase_ ) ) ) < n:\r\n\t\t\t\t\t\t\t\t\t\t\t\tanswer += 1\r\n\t\t\t\t\t\treturn answer + 1\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tprint(solution(int(str(input()).strip())))\r\n\r\n"},"code_codestyle":{"kind":"number","value":703,"string":"703"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\treturn int(input_a == input_a == 0 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tprint(\"\"\"Truth Table of NOR Gate:\"\"\" )\r\n\t\t\t\t\t\tprint(\"\"\"| Input 1 | Input 2 | Output |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 0 | 0 | {nor_gate(0 ,0 )} |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 0 | 1 | {nor_gate(0 ,1 )} |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 1 | 0 | {nor_gate(1 ,0 )} |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 1 | 1 | {nor_gate(1 ,1 )} |\"\"\" )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\t\tmain()\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305175,"cells":{"code":{"kind":"string","value":"\r\r\r\rfrom ...processing_utils import ProcessorMixin\r\r\r\r\r\r\rclass _a (\t\t\t_UpperCAmelCase ):\r\t\t\t_UpperCamelCase: Union[str, Any] =\t\t[\"\"\"image_processor\"\"\", \"\"\"feature_extractor\"\"\"]\r\t\t\t_UpperCamelCase: List[str] =\t\t\"\"\"TvltImageProcessor\"\"\"\r\t\t\t_UpperCamelCase: Optional[Any] 
=\t\t\"\"\"TvltFeatureExtractor\"\"\"\r\r\r\r\r\r\r\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tOptional[int]:\r\t\t\t\t\t\t\t\t\tsuper().__init__(image_processor=lowercase__\t\t\t\t\t\t, feature_extractor=lowercase__ )\r\r\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\timage_processor\r\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tfeature_extractor\r\r\r\r\r\r\r\t\t\tdef __call__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, *lowercase_\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tUnion[str, Any]:\r\r\t\t\t\t\t\t\t\t\tif images is None and audio is None:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"You need to specify either an `images` or `audio` input to process.\"\"\" )\r\r\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tNone\r\t\t\t\t\t\t\t\t\tif images is not None:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tself.image_processor(lowercase__\t\t\t\t\t\t, mask_pixel=lowercase__\t\t\t\t\t\t, *lowercase__\t\t\t\t\t\t, **lowercase__ )\r\t\t\t\t\t\t\t\t\tif images_mixed is not None:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.image_processor(lowercase__\t\t\t\t\t\t, is_mixed=lowercase__\t\t\t\t\t\t, *lowercase__\t\t\t\t\t\t, **lowercase__ )\r\t\t\t\t\t\t\t\t\tif audio is not None:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tself.feature_extractor(\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t lowercase__\t\t\t\t\t\t, *lowercase__\t\t\t\t\t\t, sampling_rate=lowercase__\t\t\t\t\t\t, mask_audio=lowercase__\t\t\t\t\t\t, **lowercase__ )\r\r\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{}\r\t\t\t\t\t\t\t\t\tif audio is not None:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\toutput_dict.update(lowercase__ )\r\t\t\t\t\t\t\t\t\tif images is 
not None:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\toutput_dict.update(lowercase__ )\r\t\t\t\t\t\t\t\t\tif images_mixed_dict is not None:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\toutput_dict.update(lowercase__ )\r\t\t\t\t\t\t\t\t\treturn output_dict\r\r\r\r\r\r\r\t\t\t@property\r\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.image_processor.model_input_names\r\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.feature_extractor.model_input_names\r\t\t\t\t\t\t\t\t\treturn list(dict.fromkeys(image_processor_input_names + feature_extractor_input_names ) )\r\r"},"code_codestyle":{"kind":"number","value":704,"string":"704"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import TYPE_CHECKING\r\n\r\nfrom ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint ={\r\n 'configuration_poolformer': [\r\n 'POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP',\r\n 'PoolFormerConfig',\r\n 'PoolFormerOnnxConfig',\r\n ]\r\n}\r\n\r\ntry:\r\n\t\tif not is_vision_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] =['PoolFormerFeatureExtractor']\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str] =['PoolFormerImageProcessor']\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =[\r\n\t\t 'POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'PoolFormerForImageClassification',\r\n\t\t 'PoolFormerModel',\r\n\t\t 'PoolFormerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_poolformer import (\r\n\t\t POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,\r\n\t\t PoolFormerConfig,\r\n\t\t 
PoolFormerOnnxConfig,\r\n\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_vision_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .feature_extraction_poolformer import PoolFormerFeatureExtractor\r\n\t\t\t\tfrom .image_processing_poolformer import PoolFormerImageProcessor\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_poolformer import (\r\n\t\t\t\t POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t PoolFormerForImageClassification,\r\n\t\t\t\t PoolFormerModel,\r\n\t\t\t\t PoolFormerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =_LazyModule(__name__, globals()['__file__'], _import_structure)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305176,"cells":{"code":{"kind":"string","value":"import builtins\r\nimport sys\r\n\r\nfrom ...utils.imports import _is_package_available\r\nfrom . 
import cursor, input\r\nfrom .helpers import Direction, clear_line, forceWrite, linebreak, move_cursor, reset_cursor, writeColor\r\nfrom .keymap import KEYMAP\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =False\r\ntry:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =_is_package_available('google.colab')\r\nexcept ModuleNotFoundError:\r\n\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n@input.register\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = [] ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tchoices\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tprompt\r\n\t\t\t\t\t\t\t\t\tif sys.platform == \"win32\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t'''*'''\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t'''➔ '''\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = \"\" ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tif sys.platform != \"win32\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twriteColor(self.choices[index]\t\t\t\t\t\t, 32\t\t\t\t\t\t, __snake_case )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tforceWrite(self.choices[index]\t\t\t\t\t\t, __snake_case )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tif index == self.position:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tforceWrite(f\"\"\" {self.arrow_char} \"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.write_choice(__snake_case )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tforceWrite(f\"\"\" {self.choices[index]}\"\"\" )\r\n\t\t\t\t\t\t\t\t\treset_cursor()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, 
lowercase_\t\t\t\t\t\t, lowercase_ = 1 ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.position\r\n\t\t\t\t\t\t\t\t\tif direction == Direction.DOWN:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif self.position + 1 >= len(self.choices ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.position += num_spaces\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif self.position - 1 < 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.position -= num_spaces\r\n\t\t\t\t\t\t\t\t\tclear_line()\r\n\t\t\t\t\t\t\t\t\tself.print_choice(__snake_case )\r\n\t\t\t\t\t\t\t\t\tmove_cursor(__snake_case\t\t\t\t\t\t, direction.name )\r\n\t\t\t\t\t\t\t\t\tself.print_choice(self.position )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@input.mark(KEYMAP[\"\"\"up\"\"\"] )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tself.move_direction(Direction.UP )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@input.mark(KEYMAP[\"\"\"down\"\"\"] )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tself.move_direction(Direction.DOWN )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@input.mark(KEYMAP[\"\"\"newline\"\"\"] )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tmove_cursor(len(self.choices ) - self.position\t\t\t\t\t\t, \"\"\"DOWN\"\"\" )\r\n\t\t\t\t\t\t\t\t\treturn self.position\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@input.mark(KEYMAP[\"\"\"interrupt\"\"\"] )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tmove_cursor(len(self.choices ) - self.position\t\t\t\t\t\t, \"\"\"DOWN\"\"\" )\r\n\t\t\t\t\t\t\t\t\traise KeyboardInterrupt\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@input.mark_multiple(*[KEYMAP[str(__snake_case )] for number in range(10 )] )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t 
=\t\t\t\tint(chr(self.current_selection ) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tindex - self.position\r\n\t\t\t\t\t\t\t\t\tif index == self.position:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\t\t\t\t\t\t\t\t\tif index < len(self.choices ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif self.position > index:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.move_direction(Direction.UP\t\t\t\t\t\t, -movement )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif self.position < index:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.move_direction(Direction.DOWN\t\t\t\t\t\t, __snake_case )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ = 0 ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tif self.prompt:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlinebreak()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tforceWrite(self.prompt\t\t\t\t\t\t, \"\"\"\\n\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif in_colab:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tforceWrite(\"\"\"Please input a choice index (starting from 0), and press enter\"\"\"\t\t\t\t\t\t, \"\"\"\\n\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tforceWrite(\"\"\"Please select a choice using the arrow or number keys, and selecting with enter\"\"\"\t\t\t\t\t\t, \"\"\"\\n\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tdefault_choice\r\n\t\t\t\t\t\t\t\t\tfor i in range(len(self.choices ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.print_choice(__snake_case )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tforceWrite(\"\"\"\\n\"\"\" )\r\n\t\t\t\t\t\t\t\t\tmove_cursor(len(self.choices ) - self.position\t\t\t\t\t\t, \"\"\"UP\"\"\" )\r\n\t\t\t\t\t\t\t\t\twith cursor.hide():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twhile True:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif 
in_colab:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tint(builtins.input() )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texcept ValueError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdefault_choice\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.handle_input()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif choice is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treset_cursor()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor _ in range(len(self.choices ) + 1 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmove_cursor(1\t\t\t\t\t\t, \"\"\"UP\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tclear_line()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.write_choice(__snake_case\t\t\t\t\t\t, \"\"\"\\n\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn choice\r\n\r\n"},"code_codestyle":{"kind":"number","value":705,"string":"705"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport os\r\nimport string\r\nimport sys\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =1 << 8\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={\r\n 'tab': ord('\\t'),\r\n 'newline': ord('\\r'),\r\n 'esc': 27,\r\n 'up': 65 + ARROW_KEY_FLAG,\r\n 'down': 66 + ARROW_KEY_FLAG,\r\n 'right': 67 + ARROW_KEY_FLAG,\r\n 'left': 68 + ARROW_KEY_FLAG,\r\n 'mod_int': 91,\r\n 'undefined': sys.maxsize,\r\n 'interrupt': 3,\r\n 'insert': 50,\r\n 'delete': 51,\r\n 'pg_up': 53,\r\n 'pg_down': 54,\r\n}\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =KEYMAP['up']\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =KEYMAP['left']\r\n\r\nif 
sys.platform == \"win32\":\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =[]\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint ={\r\n\t\t b'\\xe0H': KEYMAP['up'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00H': KEYMAP['up'] - ARROW_KEY_FLAG,\r\n\t\t b'\\xe0P': KEYMAP['down'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00P': KEYMAP['down'] - ARROW_KEY_FLAG,\r\n\t\t b'\\xe0M': KEYMAP['right'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00M': KEYMAP['right'] - ARROW_KEY_FLAG,\r\n\t\t b'\\xe0K': KEYMAP['left'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00K': KEYMAP['left'] - ARROW_KEY_FLAG,\r\n\t\t}\r\n\r\nfor i in range(10):\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =ord(str(i))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif os.name == \"nt\":\r\n\t\t\t\t\t\t\t\t\t\t\t\timport msvcrt\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t\"\"\"mbcs\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Flush the keyboard buffer\r\n\t\t\t\t\t\t\t\t\t\t\t\twhile msvcrt.kbhit():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmsvcrt.getch()\r\n\t\t\t\t\t\t\t\t\t\t\t\tif len(SCREAMING_SNAKE_CASE__ ) == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Read the keystroke\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tmsvcrt.getch()\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# If it is a prefix char, get second part\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif ch in (b\"\\x00\", b\"\\xe0\"):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tch + msvcrt.getch()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Translate actual Win chars to bullet char types\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tchr(WIN_KEYMAP[cha] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tWIN_CH_BUFFER.append(chr(KEYMAP[\"\"\"mod_int\"\"\"] ) 
)\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tWIN_CH_BUFFER.append(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) in (\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"insert\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"delete\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"pg_up\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"pg_down\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tWIN_CH_BUFFER.append(chr(1_2_6 ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tchr(KEYMAP[\"\"\"esc\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texcept KeyError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tcha[1]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tch.decode(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tWIN_CH_BUFFER.pop(0 )\r\n\t\t\t\t\t\telif os.name == \"posix\":\r\n\t\t\t\t\t\t\t\t\t\t\t\timport termios\r\n\t\t\t\t\t\t\t\t\t\t\t\timport tty\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tsys.stdin.fileno()\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttermios.tcgetattr(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttty.setraw(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tsys.stdin.read(1 
)\r\n\t\t\t\t\t\t\t\t\t\t\t\tfinally:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttermios.tcsetattr(SCREAMING_SNAKE_CASE__ ,termios.TCSADRAIN ,SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\treturn ch\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tget_raw_chars()\r\n\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) in [KEYMAP[\"interrupt\"], KEYMAP[\"newline\"]]:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn char\r\n\r\n\t\t\t\t\t\telif ord(SCREAMING_SNAKE_CASE__ ) == KEYMAP[\"esc\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tget_raw_chars()\r\n\t\t\t\t\t\t\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) == KEYMAP[\"mod_int\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tget_raw_chars()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) >= KEYMAP[\"arrow_begin\"] - ARROW_KEY_FLAG and ord(SCREAMING_SNAKE_CASE__ ) <= KEYMAP[\"arrow_end\"] - ARROW_KEY_FLAG:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn chr(ord(SCREAMING_SNAKE_CASE__ ) + ARROW_KEY_FLAG )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn KEYMAP[\"undefined\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn get_raw_chars()\r\n\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif char in string.printable:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn char\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn KEYMAP[\"undefined\"]\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305177,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import Dict, List\r\n\r\nfrom nltk.translate import gleu_score\r\n\r\nimport datasets\r\nfrom datasets import 
MetricInfo\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str] =\"\"\"\\\n@misc{wu2016googles,\n title={Google's Neural Machine Translation System: Bridging the Gap between Human and Machine Translation},\n author={Yonghui Wu and Mike Schuster and Zhifeng Chen and Quoc V. Le and Mohammad Norouzi and Wolfgang Macherey\n and Maxim Krikun and Yuan Cao and Qin Gao and Klaus Macherey and Jeff Klingner and Apurva Shah and Melvin\n Johnson and Xiaobing Liu and Łukasz Kaiser and Stephan Gouws and Yoshikiyo Kato and Taku Kudo and Hideto\n Kazawa and Keith Stevens and George Kurian and Nishant Patil and Wei Wang and Cliff Young and\n Jason Smith and Jason Riesa and Alex Rudnick and Oriol Vinyals and Greg Corrado and Macduff Hughes\n and Jeffrey Dean},\n year={2016},\n eprint={1609.08144},\n archivePrefix={arXiv},\n primaryClass={cs.CL}\n}\n\"\"\"\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =\"\"\"\\\nThe BLEU score has some undesirable properties when used for single\nsentences, as it was designed to be a corpus measure. We therefore\nuse a slightly different score for our RL experiments which we call\nthe 'GLEU score'. For the GLEU score, we record all sub-sequences of\n1, 2, 3 or 4 tokens in output and target sequence (n-grams). We then\ncompute a recall, which is the ratio of the number of matching n-grams\nto the number of total n-grams in the target (ground truth) sequence,\nand a precision, which is the ratio of the number of matching n-grams\nto the number of total n-grams in the generated output sequence. Then\nGLEU score is simply the minimum of recall and precision. This GLEU\nscore's range is always between 0 (no matches) and 1 (all match) and\nit is symmetrical when switching output and target. 
According to\nour experiments, GLEU score correlates quite well with the BLEU\nmetric on a corpus level but does not have its drawbacks for our per\nsentence reward objective.\n\"\"\"\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =\"\"\"\\\nComputes corpus-level Google BLEU (GLEU) score of translated segments against one or more references.\nInstead of averaging the sentence level GLEU scores (i.e. macro-average precision), Wu et al. (2016) sum up the matching\ntokens and the max of hypothesis and reference tokens for each sentence, then compute using the aggregate values.\n\nArgs:\n predictions (list of str): list of translations to score.\n Each translation should be tokenized into a list of tokens.\n references (list of list of str): list of lists of references for each translation.\n Each reference should be tokenized into a list of tokens.\n min_len (int): The minimum order of n-gram this function should extract. Defaults to 1.\n max_len (int): The maximum order of n-gram this function should extract. Defaults to 4.\n\nReturns:\n 'google_bleu': google_bleu score\n\nExamples:\n Example 1:\n >>> hyp1 = ['It', 'is', 'a', 'guide', 'to', 'action', 'which',\n ... 'ensures', 'that', 'the', 'rubber', 'duck', 'always',\n ... 'disobeys', 'the', 'commands', 'of', 'the', 'cat']\n >>> ref1a = ['It', 'is', 'the', 'guiding', 'principle', 'which',\n ... 'guarantees', 'the', 'rubber', 'duck', 'forces', 'never',\n ... 'being', 'under', 'the', 'command', 'of', 'the', 'cat']\n\n >>> hyp2 = ['he', 'read', 'the', 'book', 'because', 'he', 'was',\n ... 'interested', 'in', 'world', 'history']\n >>> ref2a = ['he', 'was', 'interested', 'in', 'world', 'history',\n ... 
'because', 'he', 'read', 'the', 'book']\n\n >>> list_of_references = [[ref1a], [ref2a]]\n >>> hypotheses = [hyp1, hyp2]\n >>> google_bleu = datasets.load_metric(\\\"google_bleu\\\")\n >>> results = google_bleu.compute(predictions=hypotheses, references=list_of_references)\n >>> print(round(results[\\\"google_bleu\\\"], 2))\n 0.44\n\n Example 2:\n >>> hyp1 = ['It', 'is', 'a', 'guide', 'to', 'action', 'which',\n ... 'ensures', 'that', 'the', 'rubber', 'duck', 'always',\n ... 'disobeys', 'the', 'commands', 'of', 'the', 'cat']\n >>> ref1a = ['It', 'is', 'the', 'guiding', 'principle', 'which',\n ... 'guarantees', 'the', 'rubber', 'duck', 'forces', 'never',\n ... 'being', 'under', 'the', 'command', 'of', 'the', 'cat']\n >>> ref1b = ['It', 'is', 'a', 'guide', 'to', 'action', 'that',\n ... 'ensures', 'that', 'the', 'rubber', 'duck', 'will', 'never',\n ... 'heed', 'the', 'cat', 'commands']\n >>> ref1c = ['It', 'is', 'the', 'practical', 'guide', 'for', 'the',\n ... 'rubber', 'duck', 'army', 'never', 'to', 'heed', 'the', 'directions',\n ... 'of', 'the', 'cat']\n\n >>> hyp2 = ['he', 'read', 'the', 'book', 'because', 'he', 'was',\n ... 'interested', 'in', 'world', 'history']\n >>> ref2a = ['he', 'was', 'interested', 'in', 'world', 'history',\n ... 'because', 'he', 'read', 'the', 'book']\n\n >>> list_of_references = [[ref1a, ref1b, ref1c], [ref2a]]\n >>> hypotheses = [hyp1, hyp2]\n >>> google_bleu = datasets.load_metric(\\\"google_bleu\\\")\n >>> results = google_bleu.compute(predictions=hypotheses, references=list_of_references)\n >>> print(round(results[\\\"google_bleu\\\"], 2))\n 0.61\n\n Example 3:\n >>> hyp1 = ['It', 'is', 'a', 'guide', 'to', 'action', 'which',\n ... 'ensures', 'that', 'the', 'rubber', 'duck', 'always',\n ... 'disobeys', 'the', 'commands', 'of', 'the', 'cat']\n >>> ref1a = ['It', 'is', 'the', 'guiding', 'principle', 'which',\n ... 'guarantees', 'the', 'rubber', 'duck', 'forces', 'never',\n ... 
'being', 'under', 'the', 'command', 'of', 'the', 'cat']\n >>> ref1b = ['It', 'is', 'a', 'guide', 'to', 'action', 'that',\n ... 'ensures', 'that', 'the', 'rubber', 'duck', 'will', 'never',\n ... 'heed', 'the', 'cat', 'commands']\n >>> ref1c = ['It', 'is', 'the', 'practical', 'guide', 'for', 'the',\n ... 'rubber', 'duck', 'army', 'never', 'to', 'heed', 'the', 'directions',\n ... 'of', 'the', 'cat']\n\n >>> hyp2 = ['he', 'read', 'the', 'book', 'because', 'he', 'was',\n ... 'interested', 'in', 'world', 'history']\n >>> ref2a = ['he', 'was', 'interested', 'in', 'world', 'history',\n ... 'because', 'he', 'read', 'the', 'book']\n\n >>> list_of_references = [[ref1a, ref1b, ref1c], [ref2a]]\n >>> hypotheses = [hyp1, hyp2]\n >>> google_bleu = datasets.load_metric(\\\"google_bleu\\\")\n >>> results = google_bleu.compute(predictions=hypotheses, references=list_of_references, min_len=2)\n >>> print(round(results[\\\"google_bleu\\\"], 2))\n 0.53\n\n Example 4:\n >>> hyp1 = ['It', 'is', 'a', 'guide', 'to', 'action', 'which',\n ... 'ensures', 'that', 'the', 'rubber', 'duck', 'always',\n ... 'disobeys', 'the', 'commands', 'of', 'the', 'cat']\n >>> ref1a = ['It', 'is', 'the', 'guiding', 'principle', 'which',\n ... 'guarantees', 'the', 'rubber', 'duck', 'forces', 'never',\n ... 'being', 'under', 'the', 'command', 'of', 'the', 'cat']\n >>> ref1b = ['It', 'is', 'a', 'guide', 'to', 'action', 'that',\n ... 'ensures', 'that', 'the', 'rubber', 'duck', 'will', 'never',\n ... 'heed', 'the', 'cat', 'commands']\n >>> ref1c = ['It', 'is', 'the', 'practical', 'guide', 'for', 'the',\n ... 'rubber', 'duck', 'army', 'never', 'to', 'heed', 'the', 'directions',\n ... 'of', 'the', 'cat']\n\n >>> hyp2 = ['he', 'read', 'the', 'book', 'because', 'he', 'was',\n ... 'interested', 'in', 'world', 'history']\n >>> ref2a = ['he', 'was', 'interested', 'in', 'world', 'history',\n ... 
'because', 'he', 'read', 'the', 'book']\n\n >>> list_of_references = [[ref1a, ref1b, ref1c], [ref2a]]\n >>> hypotheses = [hyp1, hyp2]\n >>> google_bleu = datasets.load_metric(\\\"google_bleu\\\")\n >>> results = google_bleu.compute(predictions=hypotheses,references=list_of_references, min_len=2, max_len=6)\n >>> print(round(results[\\\"google_bleu\\\"], 2))\n 0.4\n\"\"\"\r\n\r\n\r\n\r\n\r\n\r\n\r\n@datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION , _KWARGS_DESCRIPTION )\r\nclass _a (\t\t\tdatasets.Metric ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tMetricInfo:\r\n\t\t\t\t\t\t\t\t\treturn datasets.MetricInfo(\r\n\t\t\t\t\t\t\t\t\t description=_DESCRIPTION\t\t\t\t\t\t, citation=_CITATION\t\t\t\t\t\t, inputs_description=_KWARGS_DESCRIPTION\t\t\t\t\t\t, features=datasets.Features(\r\n\t\t\t\t\t\t\t\t\t {\r\n\t\t\t\t\t\t\t\t\t \"\"\"predictions\"\"\": datasets.Sequence(datasets.Value(\"\"\"string\"\"\"\t\t\t\t\t\t, id=\"\"\"token\"\"\" )\t\t\t\t\t\t, id=\"\"\"sequence\"\"\" ),\r\n\t\t\t\t\t\t\t\t\t \"\"\"references\"\"\": datasets.Sequence(\r\n\t\t\t\t\t\t\t\t\t datasets.Sequence(datasets.Value(\"\"\"string\"\"\"\t\t\t\t\t\t, id=\"\"\"token\"\"\" )\t\t\t\t\t\t, id=\"\"\"sequence\"\"\" )\t\t\t\t\t\t, id=\"\"\"references\"\"\" ),\r\n\t\t\t\t\t\t\t\t\t } )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = 1\t\t\t\t\t\t, lowercase_ = 4\t\t\t\t\t\t, ) ->\t\t\t\tDict[str, float]:\r\n\t\t\t\t\t\t\t\t\treturn {\r\n\t\t\t\t\t\t\t\t\t \"google_bleu\": gleu_score.corpus_gleu(\r\n\t\t\t\t\t\t\t\t\t list_of_references=lowercase_\t\t\t\t\t\t, hypotheses=lowercase_\t\t\t\t\t\t, min_len=lowercase_\t\t\t\t\t\t, max_len=lowercase_ )\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n"},"code_codestyle":{"kind":"number","value":706,"string":"706"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\n# Imports\r\nimport numpy as 
np\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tself.set_matricies(red=lowercase_\t\t\t\t\t\t, green=lowercase_\t\t\t\t\t\t, blue=lowercase_\t\t\t\t\t\t, red_edge=lowercase_\t\t\t\t\t\t, nir=lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tif red is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tred\r\n\t\t\t\t\t\t\t\t\tif green is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tgreen\r\n\t\t\t\t\t\t\t\t\tif blue is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tblue\r\n\t\t\t\t\t\t\t\t\tif red_edge is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tred_edge\r\n\t\t\t\t\t\t\t\t\tif nir is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tnir\r\n\t\t\t\t\t\t\t\t\treturn True\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=\"\"\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tself.set_matricies(red=lowercase_\t\t\t\t\t\t, green=lowercase_\t\t\t\t\t\t, blue=lowercase_\t\t\t\t\t\t, red_edge=lowercase_\t\t\t\t\t\t, nir=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t \"\"\"ARVI2\"\"\": self.arvaa,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CCCI\"\"\": self.ccci,\r\n\t\t\t\t\t\t\t\t\t 
\"\"\"CVI\"\"\": self.cvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GLI\"\"\": self.gli,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NDVI\"\"\": self.ndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"BNDVI\"\"\": self.bndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"redEdgeNDVI\"\"\": self.red_edge_ndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GNDVI\"\"\": self.gndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GBNDVI\"\"\": self.gbndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GRNDVI\"\"\": self.grndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"RBNDVI\"\"\": self.rbndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"PNDVI\"\"\": self.pndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"ATSAVI\"\"\": self.atsavi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"BWDRVI\"\"\": self.bwdrvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CIgreen\"\"\": self.ci_green,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CIrededge\"\"\": self.ci_rededge,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CI\"\"\": self.ci,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CTVI\"\"\": self.ctvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GDVI\"\"\": self.gdvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"EVI\"\"\": self.evi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GEMI\"\"\": self.gemi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GOSAVI\"\"\": self.gosavi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GSAVI\"\"\": self.gsavi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"Hue\"\"\": self.hue,\r\n\t\t\t\t\t\t\t\t\t \"\"\"IVI\"\"\": self.ivi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"IPVI\"\"\": self.ipvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"I\"\"\": self.i,\r\n\t\t\t\t\t\t\t\t\t \"\"\"RVI\"\"\": self.rvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"MRVI\"\"\": self.mrvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"MSAVI\"\"\": self.m_savi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NormG\"\"\": self.norm_g,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NormNIR\"\"\": self.norm_nir,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NormR\"\"\": self.norm_r,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NGRDI\"\"\": self.ngrdi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"RI\"\"\": self.ri,\r\n\t\t\t\t\t\t\t\t\t \"\"\"S\"\"\": self.s,\r\n\t\t\t\t\t\t\t\t\t \"\"\"IF\"\"\": self._if,\r\n\t\t\t\t\t\t\t\t\t \"\"\"DVI\"\"\": self.dvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"TVI\"\"\": self.tvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NDRE\"\"\": 
self.ndre,\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn funcs[index]()\r\n\t\t\t\t\t\t\t\t\texcept KeyError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(\"\"\"Index not in the list!\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn False\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn -0.1_8 + (1.1_7 * ((self.nir - self.red) / (self.nir + self.red)))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn ((self.nir - self.redEdge) / (self.nir + self.redEdge)) / (\r\n\t\t\t\t\t\t\t\t\t (self.nir - self.red) / (self.nir + self.red)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir * (self.red / (self.green**2))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (2 * self.green - self.red - self.blue) / (\r\n\t\t\t\t\t\t\t\t\t 2 * self.green + self.red + self.blue\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.red) / (self.nir + self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.blue) / (self.nir + self.blue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (self.redEdge - self.red) / (self.redEdge + self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.green) / (self.nir + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.green + self.blue)) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + 
(self.green + self.blue)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.green + self.red)) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + (self.green + self.red)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.blue + self.red)) / (self.nir + (self.blue + self.red))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.green + self.red + self.blue)) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + (self.green + self.red + self.blue)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0.0_8\t\t\t\t\t\t, lowercase_=1.2_2\t\t\t\t\t\t, lowercase_=0.0_3 ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn a * (\r\n\t\t\t\t\t\t\t\t\t (self.nir - a * self.red - b)\r\n\t\t\t\t\t\t\t\t\t / (a * self.nir + self.red - a * b + x * (1 + a**2))\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (0.1 * self.nir - self.blue) / (0.1 * self.nir + self.blue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir / self.green) - 1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir / self.redEdge) - 1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn (self.red - self.blue) / self.red\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.ndvi()\r\n\t\t\t\t\t\t\t\t\treturn ((ndvi + 0.5) / (abs(ndvi + 0.5 ))) * (abs(ndvi + 0.5 ) ** (1 / 
2))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir - self.green\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn 2.5 * (\r\n\t\t\t\t\t\t\t\t\t (self.nir - self.red) / (self.nir + 6 * self.red - 7.5 * self.blue + 1)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t(2 * (self.nir**2 - self.red**2) + 1.5 * self.nir + 0.5 * self.red) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + self.red + 0.5\r\n\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\treturn n * (1 - 0.2_5 * n) - (self.red - 0.1_2_5) / (1 - self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0.1_6 ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.green) / (self.nir + self.green + y)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0.5 ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn ((self.nir - self.green) / (self.nir + self.green + n)) * (1 + n)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn np.arctan(\r\n\t\t\t\t\t\t\t\t\t ((2 * self.red - self.green - self.blue) / 3_0.5) * (self.green - self.blue) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - b) / (a * self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir / ((self.nir + self.red) / 2)) * (self.ndvi() + 1)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn (self.red + self.green + self.blue) / 3_0.5\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case 
(\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir / self.red\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (self.rvi() - 1) / (self.rvi() + 1)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn (\r\n\t\t\t\t\t\t\t\t\t (2 * self.nir + 1)\r\n\t\t\t\t\t\t\t\t\t - ((2 * self.nir + 1) ** 2 - 8 * (self.nir - self.red)) ** (1 / 2)\r\n\t\t\t\t\t\t\t\t\t) / 2\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.green / (self.nir + self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self.nir / (self.nir + self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn self.red / (self.nir + self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.green - self.red) / (self.green + self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\treturn (self.red - self.green) / (self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnp.max([np.max(self.red ), np.max(self.green ), np.max(self.blue )] )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnp.min([np.min(self.red ), np.min(self.green ), np.min(self.blue )] )\r\n\t\t\t\t\t\t\t\t\treturn (max_value - min_value) / max_value\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (2 * self.red - self.green - self.blue) / (self.green - self.blue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir / self.red\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (self.ndvi() + 0.5) ** (1 / 2)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.redEdge) / (self.nir + self.redEdge)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305178,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport argparse\r\nimport os\r\nimport shutil\r\n\r\nimport torch\r\nfrom emmental.modules import MagnitudeBinarizer, ThresholdBinarizer, TopKBinarizer\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\targs.pruning_method\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\targs.threshold\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\targs.model_name_or_path.rstrip(\"\"\"/\"\"\" )\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\targs.target_model_path\r\n\r\n\t\t\t\t\t\tprint(F\"\"\"Load fine-pruned model from {model_name_or_path}\"\"\" )\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttorch.load(os.path.join(lowercase_ ,\"\"\"pytorch_model.bin\"\"\" ) )\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t{}\r\n\r\n\t\t\t\t\t\tfor name, tensor in model.items():\r\n\t\t\t\t\t\t\t\t\t\t\t\tif \"embeddings\" in name or \"LayerNorm\" in name or \"pooler\" in name:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttensor\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Copied layer {name}\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\telif \"classifier\" in name or \"qa_output\" in name:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t 
=\t\t\t\ttensor\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Copied layer {name}\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\telif \"bias\" in name:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttensor\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Copied layer {name}\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif pruning_method == \"magnitude\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tMagnitudeBinarizer.apply(inputs=lowercase_ ,threshold=lowercase_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttensor * mask\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Pruned layer {name}\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif pruning_method == \"topK\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif \"mask_scores\" in name:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcontinue\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tname[:-6]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmodel[F\"\"\"{prefix_}mask_scores\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tTopKBinarizer.apply(lowercase_ ,lowercase_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttensor * mask\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Pruned layer {name}\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif pruning_method == \"sigmoied_threshold\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif \"mask_scores\" in name:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcontinue\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tname[:-6]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmodel[F\"\"\"{prefix_}mask_scores\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tThresholdBinarizer.apply(lowercase_ ,lowercase_ ,lowercase_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttensor * mask\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Pruned layer {name}\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif pruning_method == \"l0\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif \"mask_scores\" in name:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcontinue\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tname[:-6]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tmodel[F\"\"\"{prefix_}mask_scores\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t-0.1, 1.1\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttorch.sigmoid(lowercase_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ts * (r - l) + l\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ts_bar.clamp(min=0.0 ,max=1.0 )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttensor * mask\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Pruned layer {name}\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"Unknown pruning method\"\"\" )\r\n\r\n\t\t\t\t\t\tif target_model_path is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tos.path.join(\r\n\t\t\t\t\t\t\t\t\t\t\t\t os.path.dirname(lowercase_ ) 
,F\"\"\"bertarized_{os.path.basename(lowercase_ )}\"\"\" )\r\n\r\n\t\t\t\t\t\tif not os.path.isdir(lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tshutil.copytree(lowercase_ ,lowercase_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"\\nCreated folder {target_model_path}\"\"\" )\r\n\r\n\t\t\t\t\t\ttorch.save(lowercase_ ,os.path.join(lowercase_ ,\"\"\"pytorch_model.bin\"\"\" ) )\r\n\t\t\t\t\t\tprint(\"\"\"\\nPruned model saved! See you later!\"\"\" )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =argparse.ArgumentParser()\r\n\r\n\t\tparser.add_argument(\r\n\t\t '--pruning_method',\r\n\t\t choices=['l0', 'magnitude', 'topK', 'sigmoied_threshold'],\r\n\t\t type=str,\r\n\t\t required=True,\r\n\t\t help=(\r\n\t\t 'Pruning Method (l0 = L0 regularization, magnitude = Magnitude pruning, topK = Movement pruning,'\r\n\t\t ' sigmoied_threshold = Soft movement pruning)'\r\n\t\t ),\r\n\t\t)\r\n\t\tparser.add_argument(\r\n\t\t '--threshold',\r\n\t\t type=float,\r\n\t\t required=False,\r\n\t\t help=(\r\n\t\t 'For `magnitude` and `topK`, it is the level of remaining weights (in %) in the fine-pruned model.'\r\n\t\t 'For `sigmoied_threshold`, it is the threshold \\tau against which the (sigmoied) scores are compared.'\r\n\t\t 'Not needed for `l0`'\r\n\t\t ),\r\n\t\t)\r\n\t\tparser.add_argument(\r\n\t\t '--model_name_or_path',\r\n\t\t type=str,\r\n\t\t required=True,\r\n\t\t help='Folder containing the model that was previously fine-pruned',\r\n\t\t)\r\n\t\tparser.add_argument(\r\n\t\t '--target_model_path',\r\n\t\t default=None,\r\n\t\t type=str,\r\n\t\t required=False,\r\n\t\t help='Folder containing the model that was previously fine-pruned',\r\n\t\t)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str] =parser.parse_args()\r\n\r\n\t\tmain(args)\r\n\r\n\r\n\r\n\r\n"},"code_codestyle":{"kind":"number","value":707,"string":"707"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport argparse\r\nimport json\r\nimport 
math\r\nimport os\r\nimport time\r\nimport traceback\r\nimport zipfile\r\nfrom collections import Counter\r\n\r\nimport requests\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tif token is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t{\"\"\"Accept\"\"\": \"\"\"application/vnd.github+json\"\"\", \"\"\"Authorization\"\"\": F\"\"\"Bearer {token}\"\"\"}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tF\"\"\"https://api.github.com/repos/huggingface/transformers/actions/runs/{workflow_run_id}/jobs?per_page=100\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\trequests.get(SCREAMING_SNAKE_CASE__ ,headers=SCREAMING_SNAKE_CASE__ ).json()\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t{}\r\n\r\n\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\tjob_links.update({job[\"\"\"name\"\"\"]: job[\"\"\"html_url\"\"\"] for job in result[\"\"\"jobs\"\"\"]} )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tmath.ceil((result[\"\"\"total_count\"\"\"] - 1_0_0) / 1_0_0 )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor i in range(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\trequests.get(url + F\"\"\"&page={i + 2}\"\"\" ,headers=SCREAMING_SNAKE_CASE__ ).json()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tjob_links.update({job[\"\"\"name\"\"\"]: job[\"\"\"html_url\"\"\"] for job in result[\"\"\"jobs\"\"\"]} )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn job_links\r\n\t\t\t\t\t\texcept Exception:\r\n\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Unknown error, could not fetch links:\\n{traceback.format_exc()}\"\"\" )\r\n\r\n\t\t\t\t\t\treturn {}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None 
):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tif token is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t{\"\"\"Accept\"\"\": \"\"\"application/vnd.github+json\"\"\", \"\"\"Authorization\"\"\": F\"\"\"Bearer {token}\"\"\"}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tF\"\"\"https://api.github.com/repos/huggingface/transformers/actions/runs/{worflow_run_id}/artifacts?per_page=100\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\trequests.get(SCREAMING_SNAKE_CASE__ ,headers=SCREAMING_SNAKE_CASE__ ).json()\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t{}\r\n\r\n\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\tartifacts.update({artifact[\"\"\"name\"\"\"]: artifact[\"\"\"archive_download_url\"\"\"] for artifact in result[\"\"\"artifacts\"\"\"]} )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tmath.ceil((result[\"\"\"total_count\"\"\"] - 1_0_0) / 1_0_0 )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor i in range(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\trequests.get(url + F\"\"\"&page={i + 2}\"\"\" ,headers=SCREAMING_SNAKE_CASE__ ).json()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tartifacts.update({artifact[\"\"\"name\"\"\"]: artifact[\"\"\"archive_download_url\"\"\"] for artifact in result[\"\"\"artifacts\"\"\"]} )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn artifacts\r\n\t\t\t\t\t\texcept Exception:\r\n\t\t\t\t\t\t\t\t\t\t\t\tprint(F\"\"\"Unknown error, could not fetch links:\\n{traceback.format_exc()}\"\"\" )\r\n\r\n\t\t\t\t\t\treturn {}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t 
=\t\t\t\tNone\r\n\t\t\t\t\t\tif token is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{\"\"\"Accept\"\"\": \"\"\"application/vnd.github+json\"\"\", \"\"\"Authorization\"\"\": F\"\"\"Bearer {token}\"\"\"}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\trequests.get(SCREAMING_SNAKE_CASE__ ,headers=SCREAMING_SNAKE_CASE__ ,allow_redirects=SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tresult.headers[\"\"\"Location\"\"\"]\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\trequests.get(SCREAMING_SNAKE_CASE__ ,allow_redirects=SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tos.path.join(SCREAMING_SNAKE_CASE__ ,F\"\"\"{artifact_name}.zip\"\"\" )\r\n\t\t\t\t\t\twith open(SCREAMING_SNAKE_CASE__ ,\"\"\"wb\"\"\" ) as fp:\r\n\t\t\t\t\t\t\t\t\t\t\t\tfp.write(response.content )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\r\n\t\t\t\t\t\twith zipfile.ZipFile(SCREAMING_SNAKE_CASE__ ) as z:\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor filename in z.namelist():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif not os.path.isdir(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# read the file\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif filename in [\"failures_line.txt\", \"summary_short.txt\", \"job_name.txt\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith z.open(SCREAMING_SNAKE_CASE__ ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor line in 
f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tline.decode(\"\"\"UTF-8\"\"\" ).strip()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif filename == \"failures_line.txt\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# `error_line` is the place where `error` occurs\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tline[: line.index(\"\"\": \"\"\" )]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tline[line.index(\"\"\": \"\"\" ) + len(\"\"\": \"\"\" ) :]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\terrors.append([error_line, error] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texcept Exception:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# skip un-related lines\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tpass\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif filename == \"summary_short.txt\" and line.startswith(\"\"\"FAILED \"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# `test` is the test method that failed\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tline[len(\"\"\"FAILED \"\"\" ) 
:]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfailed_tests.append(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif filename == \"job_name.txt\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tline\r\n\r\n\t\t\t\t\t\tif len(SCREAMING_SNAKE_CASE__ ) != len(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t F\"\"\"`errors` and `failed_tests` should have the same number of elements. Got {len(SCREAMING_SNAKE_CASE__ )} for `errors` \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t F\"\"\"and {len(SCREAMING_SNAKE_CASE__ )} for `failed_tests` instead. The test reports in {artifact_zip_path} have some\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t \"\"\" problem.\"\"\" )\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tif job_name and job_links:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tjob_links.get(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\t# A list with elements of the form (line of error, error, failed test)\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[x + [y] + [job_link] for x, y in zip(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )]\r\n\r\n\t\t\t\t\t\treturn result\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t[]\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[os.path.join(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ) for p in os.listdir(SCREAMING_SNAKE_CASE__ ) if p.endswith(\"\"\".zip\"\"\" )]\r\n\t\t\t\t\t\tfor p in 
paths:\r\n\t\t\t\t\t\t\t\t\t\t\t\terrors.extend(get_errors_from_single_artifact(SCREAMING_SNAKE_CASE__ ,job_links=SCREAMING_SNAKE_CASE__ ) )\r\n\r\n\t\t\t\t\t\treturn errors\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tCounter()\r\n\t\t\t\t\t\tcounter.update([x[1] for x in logs] )\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tcounter.most_common()\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t{}\r\n\t\t\t\t\t\tfor error, count in counts:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif error_filter is None or error not in error_filter:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t{\"\"\"count\"\"\": count, \"\"\"failed_tests\"\"\": [(x[2], x[0]) for x in logs if x[1] == error]}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdict(sorted(r.items() ,key=lambda SCREAMING_SNAKE_CASE__ : item[1][\"count\"] ,reverse=SCREAMING_SNAKE_CASE__ ) )\r\n\t\t\t\t\t\treturn r\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ttest.split(\"\"\"::\"\"\" )[0]\r\n\t\t\t\t\t\tif test.startswith(\"\"\"tests/models/\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttest.split(\"\"\"/\"\"\" )[2]\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tNone\r\n\r\n\t\t\t\t\t\treturn test\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[(x[0], x[1], get_model(x[2] )) for x in logs]\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t 
=\t\t\t\t[x for x in logs if x[2] is not None]\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{x[2] for x in logs}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t{}\r\n\t\t\t\t\t\tfor test in tests:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tCounter()\r\n\t\t\t\t\t\t\t\t\t\t\t\t# count by errors in `test`\r\n\t\t\t\t\t\t\t\t\t\t\t\tcounter.update([x[1] for x in logs if x[2] == test] )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tcounter.most_common()\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t{error: count for error, count in counts if (error_filter is None or error not in error_filter)}\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tsum(error_counts.values() )\r\n\t\t\t\t\t\t\t\t\t\t\t\tif n_errors > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t{\"\"\"count\"\"\": n_errors, \"\"\"errors\"\"\": error_counts}\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tdict(sorted(r.items() ,key=lambda SCREAMING_SNAKE_CASE__ : item[1][\"count\"] ,reverse=SCREAMING_SNAKE_CASE__ ) )\r\n\t\t\t\t\t\treturn r\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t\"\"\"| no. 
| error | status |\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t\"\"\"|-:|:-|:-|\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[header, sep]\r\n\t\t\t\t\t\tfor error in reduced_by_error:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\treduced_by_error[error][\"\"\"count\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tF\"\"\"| {count} | {error[:1_0_0]} | |\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\tlines.append(SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\treturn \"\\n\".join(SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t\"\"\"| model | no. of errors | major error | count |\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t\"\"\"|-:|-:|-:|-:|\"\"\"\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t[header, sep]\r\n\t\t\t\t\t\tfor model in reduced_by_model:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\treduced_by_model[model][\"\"\"count\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tlist(reduced_by_model[model][\"\"\"errors\"\"\"].items() )[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tF\"\"\"| {model} | {count} | {error[:6_0]} | {_count} |\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\tlines.append(SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\treturn \"\\n\".join(SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =argparse.ArgumentParser()\r\n\t\t# Required parameters\r\n\t\tparser.add_argument('--workflow_run_id', type=str, required=True, help='A GitHub Actions workflow run id.')\r\n\t\tparser.add_argument(\r\n\t\t '--output_dir',\r\n\t\t type=str,\r\n\t\t required=True,\r\n\t\t help='Where to store the downloaded artifacts and other result 
files.',\r\n\t\t)\r\n\t\tparser.add_argument('--token', default=None, type=str, help='A token that has actions:read permission.')\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =parser.parse_args()\r\n\r\n\t\tos.makedirs(args.output_dir, exist_ok=True)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =get_job_links(args.workflow_run_id, token=args.token)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={}\r\n\t\t# To deal with `workflow_call` event, where a job name is the combination of the job names in the caller and callee.\r\n\t\t# For example, `PyTorch 1.11 / Model tests (models/albert, single-gpu)`.\r\n\t\tif _job_links:\r\n\t\t\t\tfor k, v in _job_links.items():\r\n\t\t\t\t\t\t# This is how GitHub actions combine job names.\r\n\t\t\t\t\t\tif \" / \" in k:\r\n\t\t\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =k.find(' / ')\r\n\t\t\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =k[index + len(' / ') :]\r\n\t\t\t\t\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =v\r\n\t\twith open(os.path.join(args.output_dir, 'job_links.json'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tjson.dump(job_links, fp, ensure_ascii=False, indent=4)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =get_artifacts_links(args.workflow_run_id, token=args.token)\r\n\t\twith open(os.path.join(args.output_dir, 'artifacts.json'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tjson.dump(artifacts, fp, ensure_ascii=False, indent=4)\r\n\r\n\t\tfor idx, (name, url) in enumerate(artifacts.items()):\r\n\t\t\t\tdownload_artifact(name, url, args.output_dir, args.token)\r\n\t\t\t\t# Be gentle to GitHub\r\n\t\t\t\ttime.sleep(1)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] =get_all_errors(args.output_dir, job_links=job_links)\r\n\r\n\t\t# `e[1]` is the error\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =Counter()\r\n\t\tcounter.update([e[1] for e in errors])\r\n\r\n\t\t# print the top 30 most common test errors\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint 
=counter.most_common(30)\r\n\t\tfor item in most_common:\r\n\t\t\t\tprint(item)\r\n\r\n\t\twith open(os.path.join(args.output_dir, 'errors.json'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tjson.dump(errors, fp, ensure_ascii=False, indent=4)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =reduce_by_error(errors)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =reduce_by_model(errors)\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =make_github_table(reduced_by_error)\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] =make_github_table_per_model(reduced_by_model)\r\n\r\n\t\twith open(os.path.join(args.output_dir, 'reduced_by_error.txt'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tfp.write(sa)\r\n\t\twith open(os.path.join(args.output_dir, 'reduced_by_model.txt'), 'w', encoding='UTF-8') as fp:\r\n\t\t\t\tfp.write(sa)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305179,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\n'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\r\nimport unittest\r\n\r\nfrom transformers import TrOCRConfig\r\nfrom transformers.testing_utils import is_torch_available, require_torch, torch_device\r\n\r\nfrom ...generation.test_utils import GenerationTesterMixin\r\nfrom ...test_configuration_common import ConfigTester\r\nfrom ...test_modeling_common import ModelTesterMixin, ids_tensor\r\nfrom ...test_pipeline_mixin import PipelineTesterMixin\r\n\r\n\r\nif is_torch_available():\r\n\t\timport torch\r\n\r\n\t\tfrom transformers.models.trocr.modeling_trocr import TrOCRDecoder, TrOCRForCausalLM\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_torch\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=99\t\t\t\t\t\t, lowercase_=13\t\t\t\t\t\t, lowercase_=16\t\t\t\t\t\t, lowercase_=7\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, 
lowercase_=True\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=30\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tparent\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tbatch_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tdecoder_seq_length\r\n\t\t\t\t\t\t\t\t\t# For common tests\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.decoder_seq_length\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tis_training\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tuse_attention_mask\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tuse_labels\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tvocab_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\td_model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\td_model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tdecoder_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tdecoder_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tdecoder_ffn_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdecoder_attention_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdecoder_attention_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\teos_token_id\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tbos_token_id\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tpad_token_id\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdecoder_start_token_id\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t 
=\t\t\t\tuse_cache\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tmax_position_embeddings\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdecoder_seq_length\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t2\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tids_tensor([self.batch_size, self.decoder_seq_length]\t\t\t\t\t\t, self.vocab_size )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\t\t\t\tif self.use_attention_mask:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tids_tensor([self.batch_size, self.decoder_seq_length]\t\t\t\t\t\t, vocab_size=2 )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\t\t\t\tif self.use_labels:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tids_tensor([self.batch_size, self.decoder_seq_length]\t\t\t\t\t\t, self.vocab_size )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tTrOCRConfig(\r\n\t\t\t\t\t\t\t\t\t vocab_size=self.vocab_size\t\t\t\t\t\t, d_model=self.d_model\t\t\t\t\t\t, decoder_layers=self.decoder_layers\t\t\t\t\t\t, decoder_ffn_dim=self.decoder_ffn_dim\t\t\t\t\t\t, decoder_attention_heads=self.decoder_attention_heads\t\t\t\t\t\t, eos_token_id=self.eos_token_id\t\t\t\t\t\t, bos_token_id=self.bos_token_id\t\t\t\t\t\t, use_cache=self.use_cache\t\t\t\t\t\t, pad_token_id=self.pad_token_id\t\t\t\t\t\t, decoder_start_token_id=self.decoder_start_token_id\t\t\t\t\t\t, max_position_embeddings=self.max_position_embeddings\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\treturn (config, input_ids, attention_mask, lm_labels)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, 
lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tTrue\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tTrOCRDecoder(config=_snake_case ).to(_snake_case ).eval()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tinput_ids[:2]\r\n\r\n\t\t\t\t\t\t\t\t\tinput_ids[input_ids == 0] += 1\r\n\t\t\t\t\t\t\t\t\t# first forward pass\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tmodel(_snake_case\t\t\t\t\t\t, use_cache=_snake_case )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmodel(_snake_case )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tmodel(_snake_case\t\t\t\t\t\t, use_cache=_snake_case )\r\n\r\n\t\t\t\t\t\t\t\t\tself.parent.assertTrue(len(_snake_case ) == len(_snake_case ) )\r\n\t\t\t\t\t\t\t\t\tself.parent.assertTrue(len(_snake_case ) == len(_snake_case ) + 1 )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\toutputs[\"\"\"past_key_values\"\"\"]\r\n\r\n\t\t\t\t\t\t\t\t\t# create hypothetical next token and extent to next_input_ids\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tids_tensor((2, 1)\t\t\t\t\t\t, config.vocab_size - 1 ) + 1\r\n\r\n\t\t\t\t\t\t\t\t\t# append to next input_ids and\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttorch.cat([input_ids, next_tokens]\t\t\t\t\t\t, dim=-1 )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tmodel(_snake_case )[\"\"\"last_hidden_state\"\"\"]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tmodel(_snake_case\t\t\t\t\t\t, past_key_values=_snake_case )[\"\"\"last_hidden_state\"\"\"]\r\n\r\n\t\t\t\t\t\t\t\t\t# select random slice\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tids_tensor((1,)\t\t\t\t\t\t, output_from_past.shape[-1] ).item()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t 
=\t\t\t\toutput_from_no_past[:, next_input_ids.shape[-1] - 1, random_slice_idx].detach()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\toutput_from_past[:, 0, random_slice_idx].detach()\r\n\r\n\t\t\t\t\t\t\t\t\t# test that outputs are equal for slice\r\n\t\t\t\t\t\t\t\t\tassert torch.allclose(_snake_case\t\t\t\t\t\t, _snake_case\t\t\t\t\t\t, atol=1e-3 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tself.prepare_config_and_inputs()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase : int\t\t\t\t =\t\t\t\tconfig_and_inputs\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{\"\"\"input_ids\"\"\": input_ids, \"\"\"attention_mask\"\"\": attention_mask}\r\n\t\t\t\t\t\t\t\t\treturn config, inputs_dict\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_torch\r\nclass _a (\t\t\tUpperCAmelCase_ , UpperCAmelCase_ , UpperCAmelCase_ , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: Any =\t\t(TrOCRDecoder, TrOCRForCausalLM) if is_torch_available() else ()\r\n\t\t\t_UpperCamelCase: List[str] =\t\t(TrOCRForCausalLM,) if is_torch_available() else ()\r\n\t\t\t_UpperCamelCase: List[str] =\t\t{\"text-generation\": TrOCRForCausalLM} if is_torch_available() else {}\r\n\t\t\t_UpperCamelCase: Dict =\t\tTrue\r\n\t\t\t_UpperCamelCase: str =\t\tFalse\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tTrOCRStandaloneDecoderModelTester(self\t\t\t\t\t\t, is_training=_snake_case )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tConfigTester(self\t\t\t\t\t\t, config_class=_snake_case )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tself.config_tester.run_common_tests()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tself.model_tester.prepare_config_and_inputs()\r\n\t\t\t\t\t\t\t\t\tself.model_tester.create_and_check_decoder_model_past(*_snake_case )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skip(\"\"\"The model doesn\\'t support left padding\"\"\" ) # and it's not used enough to be worth fixing :)\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n"},"code_codestyle":{"kind":"number","value":708,"string":"708"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import TYPE_CHECKING\r\n\r\n# rely on isort to merge the imports\r\nfrom ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] ={\r\n 'configuration_autoformer': [\r\n 'AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP',\r\n 'AutoformerConfig',\r\n ],\r\n}\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =[\r\n\t\t 'AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'AutoformerForPrediction',\r\n\t\t 'AutoformerModel',\r\n\t\t 'AutoformerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_autoformer import (\r\n\t\t AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,\r\n\t\t AutoformerConfig,\r\n\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not 
is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_autoformer import (\r\n\t\t\t\t AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t AutoformerForPrediction,\r\n\t\t\t\t AutoformerModel,\r\n\t\t\t\t AutoformerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =_LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305180,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\n'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\r\nimport torch\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif torch.cuda.is_available():\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\ttorch.cuda.device_count()\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\tprint(F\"\"\"Successfully ran on {num_gpus} GPUs\"\"\" )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tmain()\r\n\r\n"},"code_codestyle":{"kind":"number","value":709,"string":"709"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport copy\r\nfrom collections import OrderedDict\r\nfrom typing import Dict, Mapping\r\n\r\nfrom packaging import version\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...onnx import OnnxConfig\r\nfrom ...utils import logging\r\nfrom ..auto import CONFIG_MAPPING\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] ={\r\n 'facebook/detr-resnet-50': 'https://huggingface.co/facebook/detr-resnet-50/resolve/main/config.json',\r\n # See all DETR models at 
https://huggingface.co/models?filter=detr\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: List[str] =\t\t\"detr\"\r\n\t\t\t_UpperCamelCase: Dict =\t\t[\"past_key_values\"]\r\n\t\t\t_UpperCamelCase: Optional[int] =\t\t{\r\n\t\t\t \"hidden_size\": \"d_model\",\r\n\t\t\t \"num_attention_heads\": \"encoder_attention_heads\",\r\n\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=100\t\t\t\t\t\t, lowercase_=6\t\t\t\t\t\t, lowercase_=2048\t\t\t\t\t\t, lowercase_=8\t\t\t\t\t\t, lowercase_=6\t\t\t\t\t\t, lowercase_=2048\t\t\t\t\t\t, lowercase_=8\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=\"relu\"\t\t\t\t\t\t, lowercase_=256\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1.0\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=\"sine\"\t\t\t\t\t\t, lowercase_=\"resnet50\"\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=5\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=5\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tif backbone_config is not None and use_timm_backbone:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"You can't specify both `backbone_config` and `use_timm_backbone`.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tif not use_timm_backbone:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif backbone_config is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlogger.info(\"\"\"`backbone_config` is `None`. 
Initializing the config with the default `ResNet` backbone.\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tCONFIG_MAPPING[\"\"\"resnet\"\"\"](out_features=[\"\"\"stage4\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tbackbone_config.get(\"\"\"model_type\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tCONFIG_MAPPING[backbone_model_type]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tconfig_class.from_dict(lowercase_ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# set timm attributes to None\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase , lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tNone, None, None\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tuse_timm_backbone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tbackbone_config\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tnum_channels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tnum_queries\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\td_model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tencoder_ffn_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tencoder_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tencoder_attention_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdecoder_ffn_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdecoder_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tdecoder_attention_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tdropout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tattention_dropout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t 
=\t\t\t\tactivation_dropout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tactivation_function\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tinit_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tinit_xavier_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tencoder_layerdrop\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdecoder_layerdrop\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tencoder_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tauxiliary_loss\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tposition_embedding_type\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tbackbone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tuse_pretrained_backbone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tdilation\r\n\t\t\t\t\t\t\t\t\t# Hungarian matcher\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tclass_cost\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tbbox_cost\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tgiou_cost\r\n\t\t\t\t\t\t\t\t\t# Loss coefficients\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tmask_loss_coefficient\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdice_loss_coefficient\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tbbox_loss_coefficient\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tgiou_loss_coefficient\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\teos_coefficient\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(is_encoder_decoder=lowercase_\t\t\t\t\t\t, **lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self.encoder_attention_heads\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self.d_model\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@classmethod\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tcls\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn cls(backbone_config=lowercase_\t\t\t\t\t\t, **lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict[str, any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tcopy.deepcopy(self.__dict__ )\r\n\t\t\t\t\t\t\t\t\tif output[\"backbone_config\"] is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.backbone_config.to_dict()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.__class__.model_type\r\n\t\t\t\t\t\t\t\t\treturn output\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: Any =\t\tversion.parse(\"1.11\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tMapping[str, Mapping[int, str]]:\r\n\t\t\t\t\t\t\t\t\treturn OrderedDict(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t (\"\"\"pixel_values\"\"\", {0: \"\"\"batch\"\"\", 1: \"\"\"num_channels\"\"\", 2: \"\"\"height\"\"\", 3: \"\"\"width\"\"\"}),\r\n\t\t\t\t\t\t\t\t\t (\"\"\"pixel_mask\"\"\", {0: \"\"\"batch\"\"\"}),\r\n\t\t\t\t\t\t\t\t\t ] )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tfloat:\r\n\t\t\t\t\t\t\t\t\treturn 1e-5\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn 12\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305181,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom __future__ import annotations\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple 
docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif len(snake_case_ ) == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn array\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tmin(snake_case_ ), max(snake_case_ )\r\n\r\n\t\t\t\t\t\t# Compute the variables\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t_max - _min + 1\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t[0] * holes_range, [0] * holes_range\r\n\r\n\t\t\t\t\t\t# Make the sorting.\r\n\t\t\t\t\t\tfor i in array:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ti - _min\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ti\r\n\t\t\t\t\t\t\t\t\t\t\t\tholes_repeat[index] += 1\r\n\r\n\t\t\t\t\t\t# Makes the array back by replacing the numbers.\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\tfor i in range(snake_case_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\twhile holes_repeat[i] > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tholes[i]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tindex += 1\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tholes_repeat[i] -= 1\r\n\r\n # Returns the sorted array.\r\n\t\t\t\t\t\treturn array\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =input('Enter numbers separated by comma:\\n')\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =[int(x) for x in user_input.split(',')]\r\n\t\tprint(pigeon_sort(unsorted))\r\n\r\n"},"code_codestyle":{"kind":"number","value":710,"string":"710"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport json\r\nimport logging\r\nimport os\r\nimport sys\r\nfrom pathlib import Path\r\n\r\nimport finetune_rag\r\n\r\nfrom transformers.file_utils import is_apex_available\r\nfrom transformers.testing_utils import (\r\n TestCasePlus,\r\n execute_subprocess_async,\r\n require_ray,\r\n require_torch_gpu,\r\n 
require_torch_multi_gpu,\r\n)\r\n\r\n\r\nlogging.basicConfig(level=logging.DEBUG)\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =logging.getLogger()\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =logging.StreamHandler(sys.stdout)\r\nlogger.addHandler(stream_handler)\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tos.makedirs(lowercase_\t\t\t\t\t\t, exist_ok=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{\"\"\"source\"\"\": \"\"\"What is love ?\"\"\", \"\"\"target\"\"\": \"\"\"life\"\"\"}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{\"\"\"train\"\"\": 12, \"\"\"val\"\"\": 2, \"\"\"test\"\"\": 2}\r\n\t\t\t\t\t\t\t\t\tfor split in [\"train\", \"test\", \"val\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor field in [\"source\", \"target\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t\"\"\"\\n\"\"\".join([contents[field]] * n_lines[split] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith open(os.path.join(lowercase_\t\t\t\t\t\t, f\"\"\"{split}.{field}\"\"\" )\t\t\t\t\t\t, \"\"\"w\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tf.write(lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = \"pytorch\" ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.get_auto_remove_tmp_dir()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tos.path.join(lowercase_\t\t\t\t\t\t, \"\"\"output\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tos.path.join(lowercase_\t\t\t\t\t\t, \"\"\"data\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself._create_dummy_data(data_dir=lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tf\"\"\"\n --data_dir {data_dir} \\\n --output_dir {output_dir} \\\n 
--model_name_or_path facebook/rag-sequence-base \\\n --model_type rag_sequence \\\n --do_train \\\n --do_predict \\\n --n_val -1 \\\n --val_check_interval 1.0 \\\n --train_batch_size 2 \\\n --eval_batch_size 1 \\\n --max_source_length 25 \\\n --max_target_length 25 \\\n --val_max_target_length 25 \\\n --test_max_target_length 25 \\\n --label_smoothing 0.1 \\\n --dropout 0.1 \\\n --attention_dropout 0.1 \\\n --weight_decay 0.001 \\\n --adam_epsilon 1e-08 \\\n --max_grad_norm 0.1 \\\n --lr_scheduler polynomial \\\n --learning_rate 3e-04 \\\n --num_train_epochs 1 \\\n --warmup_steps 4 \\\n --gradient_accumulation_steps 1 \\\n --distributed-port 8787 \\\n --use_dummy_dataset 1 \\\n --distributed_retriever {distributed_retriever} \\\n \"\"\".split()\r\n\r\n\t\t\t\t\t\t\t\t\tif gpus > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(f\"\"\"--gpus={gpus}\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif is_apex_available():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(\"\"\"--fp16\"\"\" )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(\"\"\"--gpus=0\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(\"\"\"--distributed_backend=ddp_cpu\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttestargs.append(\"\"\"--num_processes=2\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[sys.executable, str(Path(finetune_rag.__file__ ).resolve() )] + testargs\r\n\t\t\t\t\t\t\t\t\texecute_subprocess_async(lowercase_\t\t\t\t\t\t, env=self.get_env() )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tos.path.join(lowercase_\t\t\t\t\t\t, \"\"\"metrics.json\"\"\" )\r\n\t\t\t\t\t\t\t\t\twith open(lowercase_ ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tjson.load(lowercase_ )\r\n\t\t\t\t\t\t\t\t\treturn result\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch_gpu\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : 
Tuple\t\t\t\t =\t\t\t\tself._run_finetune(gpus=1 )\r\n\t\t\t\t\t\t\t\t\tself.assertGreaterEqual(result[\"\"\"test\"\"\"][0][\"\"\"test_avg_em\"\"\"]\t\t\t\t\t\t, 0.2 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch_multi_gpu\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself._run_finetune(gpus=2 )\r\n\t\t\t\t\t\t\t\t\tself.assertGreaterEqual(result[\"\"\"test\"\"\"][0][\"\"\"test_avg_em\"\"\"]\t\t\t\t\t\t, 0.2 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch_gpu\r\n\t\t\t@require_ray\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself._run_finetune(gpus=1\t\t\t\t\t\t, distributed_retriever=\"\"\"ray\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertGreaterEqual(result[\"\"\"test\"\"\"][0][\"\"\"test_avg_em\"\"\"]\t\t\t\t\t\t, 0.2 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch_multi_gpu\r\n\t\t\t@require_ray\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself._run_finetune(gpus=1\t\t\t\t\t\t, distributed_retriever=\"\"\"ray\"\"\" )\r\n\t\t\t\t\t\t\t\t\tself.assertGreaterEqual(result[\"\"\"test\"\"\"][0][\"\"\"test_avg_em\"\"\"]\t\t\t\t\t\t, 0.2 )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305182,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport json\r\nimport os\r\nimport tempfile\r\n\r\nfrom transformers.testing_utils import check_json_file_has_correct_format\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\t\t\t_UpperCamelCase: Dict =\t\tNone\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.feature_extraction_class(**self.feat_extract_dict )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t 
=\t\t\t\tjson.loads(feat_extract.to_json_string() )\r\n\t\t\t\t\t\t\t\t\tfor key, value in self.feat_extract_dict.items():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(obj[key]\t\t\t\t\t\t, UpperCamelCase__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tself.feature_extraction_class(**self.feat_extract_dict )\r\n\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmpdirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tos.path.join(UpperCamelCase__\t\t\t\t\t\t, \"\"\"feat_extract.json\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfeat_extract_first.to_json_file(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.feature_extraction_class.from_json_file(UpperCamelCase__ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(feat_extract_second.to_dict()\t\t\t\t\t\t, feat_extract_first.to_dict() )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tself.feature_extraction_class(**self.feat_extract_dict )\r\n\r\n\t\t\t\t\t\t\t\t\twith tempfile.TemporaryDirectory() as tmpdirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tfeat_extract_first.save_pretrained(UpperCamelCase__ )[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcheck_json_file_has_correct_format(UpperCamelCase__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.feature_extraction_class.from_pretrained(UpperCamelCase__ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(feat_extract_second.to_dict()\t\t\t\t\t\t, feat_extract_first.to_dict() )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tself.feature_extraction_class()\r\n\t\t\t\t\t\t\t\t\tself.assertIsNotNone(UpperCamelCase__ 
)\r\n\r\n"},"code_codestyle":{"kind":"number","value":711,"string":"711"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...utils import logging\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] ={\r\n 'transfo-xl-wt103': 'https://huggingface.co/transfo-xl-wt103/resolve/main/config.json',\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: Tuple =\t\t\"transfo-xl\"\r\n\t\t\t_UpperCamelCase: str =\t\t[\"mems\"]\r\n\t\t\t_UpperCamelCase: Dict =\t\t{\r\n\t\t\t \"n_token\": \"vocab_size\",\r\n\t\t\t \"hidden_size\": \"d_model\",\r\n\t\t\t \"num_attention_heads\": \"n_head\",\r\n\t\t\t \"num_hidden_layers\": \"n_layer\",\r\n\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=267735\t\t\t\t\t\t, lowercase_=[20000, 40000, 200000]\t\t\t\t\t\t, lowercase_=1024\t\t\t\t\t\t, lowercase_=1024\t\t\t\t\t\t, lowercase_=16\t\t\t\t\t\t, lowercase_=64\t\t\t\t\t\t, lowercase_=4096\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=18\t\t\t\t\t\t, lowercase_=1600\t\t\t\t\t\t, lowercase_=1000\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, lowercase_=-1\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=\"normal\"\t\t\t\t\t\t, lowercase_=0.0_1\t\t\t\t\t\t, lowercase_=0.0_1\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-5\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tvocab_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\tself.cutoffs.extend(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tif 
proj_share_all_but_first:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[False] + [True] * len(self.cutoffs )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t[False] + [False] * len(self.cutoffs )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\td_model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\td_embed\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\td_head\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\td_inner\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdiv_val\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tpre_lnorm\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tn_layer\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tn_head\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tmem_len\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tsame_length\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tattn_type\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tclamp_len\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tsample_softmax\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tadaptive\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tdropout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tdropatt\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tuntie_r\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tinit\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tinit_range\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tproj_init_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tinit_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tlayer_norm_epsilon\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(eos_token_id=lowercase_\t\t\t\t\t\t, 
**lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\t# Message copied from Transformer-XL documentation\r\n\t\t\t\t\t\t\t\t\tlogger.info(f\"\"\"The model {self.model_type} is one of the few models that has no sequence length limit.\"\"\" )\r\n\t\t\t\t\t\t\t\t\treturn -1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@max_position_embeddings.setter\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\t# Message copied from Transformer-XL documentation\r\n\t\t\t\t\t\t\t\t\traise NotImplementedError(\r\n\t\t\t\t\t\t\t\t\t f\"\"\"The model {self.model_type} is one of the few models that has no sequence length limit.\"\"\" )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305183,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport json\r\nfrom typing import Iterator, List, Union\r\n\r\nfrom tokenizers import AddedToken, Regex, Tokenizer, decoders, normalizers, pre_tokenizers, trainers\r\nfrom tokenizers.implementations.base_tokenizer import BaseTokenizer\r\nfrom tokenizers.models import Unigram\r\nfrom tokenizers.processors import TemplateProcessing\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tlowerCAmelCase__ ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ = \"▁\"\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, lowercase_ = \"\"\t\t\t\t\t\t, lowercase_ = \"\"\t\t\t\t\t\t, lowercase_ = \"\"\t\t\t\t\t\t, ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t \"\"\"pad\"\"\": {\"\"\"id\"\"\": 0, \"\"\"token\"\"\": pad_token},\r\n\t\t\t\t\t\t\t\t\t \"\"\"eos\"\"\": {\"\"\"id\"\"\": 1, \"\"\"token\"\"\": eos_token},\r\n\t\t\t\t\t\t\t\t\t \"\"\"unk\"\"\": {\"\"\"id\"\"\": 2, \"\"\"token\"\"\": 
unk_token},\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t[None] * len(self.special_tokens )\r\n\t\t\t\t\t\t\t\t\tfor token_dict in self.special_tokens.values():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttoken_dict[\"\"\"token\"\"\"]\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tTokenizer(Unigram() )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tnormalizers.Sequence(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t normalizers.Nmt(),\r\n\t\t\t\t\t\t\t\t\t normalizers.NFKC(),\r\n\t\t\t\t\t\t\t\t\t normalizers.Replace(Regex(\"\"\" {2,}\"\"\" )\t\t\t\t\t\t, \"\"\" \"\"\" ),\r\n\t\t\t\t\t\t\t\t\t normalizers.Lowercase(),\r\n\t\t\t\t\t\t\t\t\t ] )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tpre_tokenizers.Sequence(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t pre_tokenizers.Metaspace(replacement=_lowerCamelCase\t\t\t\t\t\t, add_prefix_space=_lowerCamelCase ),\r\n\t\t\t\t\t\t\t\t\t pre_tokenizers.Digits(individual_digits=_lowerCamelCase ),\r\n\t\t\t\t\t\t\t\t\t pre_tokenizers.Punctuation(),\r\n\t\t\t\t\t\t\t\t\t ] )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tdecoders.Metaspace(replacement=_lowerCamelCase\t\t\t\t\t\t, add_prefix_space=_lowerCamelCase )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tTemplateProcessing(\r\n\t\t\t\t\t\t\t\t\t single=f\"\"\"$A {self.special_tokens['eos']['token']}\"\"\"\t\t\t\t\t\t, special_tokens=[(self.special_tokens[\"\"\"eos\"\"\"][\"\"\"token\"\"\"], self.special_tokens[\"\"\"eos\"\"\"][\"\"\"id\"\"\"])]\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t \"\"\"model\"\"\": \"\"\"SentencePieceUnigram\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"replacement\"\"\": replacement,\r\n\t\t\t\t\t\t\t\t\t \"\"\"add_prefix_space\"\"\": 
add_prefix_space,\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(_lowerCamelCase\t\t\t\t\t\t, _lowerCamelCase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = 8000\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttrainers.UnigramTrainer(\r\n\t\t\t\t\t\t\t\t\t vocab_size=_lowerCamelCase\t\t\t\t\t\t, special_tokens=self.special_tokens_list\t\t\t\t\t\t, show_progress=_lowerCamelCase\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\tif isinstance(_lowerCamelCase\t\t\t\t\t\t, _lowerCamelCase ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[files]\r\n\t\t\t\t\t\t\t\t\tself._tokenizer.train(_lowerCamelCase\t\t\t\t\t\t, trainer=_lowerCamelCase )\r\n\r\n\t\t\t\t\t\t\t\t\tself.add_unk_id()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = 8000\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttrainers.UnigramTrainer(\r\n\t\t\t\t\t\t\t\t\t vocab_size=_lowerCamelCase\t\t\t\t\t\t, special_tokens=self.special_tokens_list\t\t\t\t\t\t, show_progress=_lowerCamelCase\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\tself._tokenizer.train_from_iterator(_lowerCamelCase\t\t\t\t\t\t, trainer=_lowerCamelCase )\r\n\r\n\t\t\t\t\t\t\t\t\tself.add_unk_id()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tjson.loads(self._tokenizer.to_str() )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.special_tokens[\"\"\"unk\"\"\"][\"\"\"id\"\"\"]\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tTokenizer.from_str(json.dumps(_lowerCamelCase ) 
)\r\n\r\n"},"code_codestyle":{"kind":"number","value":712,"string":"712"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport torch\r\n\r\nfrom diffusers import DiffusionPipeline\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tsuper().__init__()\r\n\r\n\t\t\t\t\t\t\t\t\tself.register_modules(unet=lowercase_\t\t\t\t\t\t, scheduler=lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __call__(\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.randn(\r\n\t\t\t\t\t\t\t\t\t (1, self.unet.config.in_channels, self.unet.config.sample_size, self.unet.config.sample_size)\t\t\t\t\t\t, )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t1\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.unet(lowercase_\t\t\t\t\t\t, lowercase_ ).sample\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tself.scheduler.step(lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ).prev_sample\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tscheduler_output - scheduler_output + torch.ones_like(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\treturn result\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305184,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[3_1, 2_8, 3_1, 3_0, 3_1, 3_0, 3_1, 3_1, 3_0, 3_1, 3_0, 3_1]\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t6\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t1\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t1_9_0_1\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : 
List[str]\t\t\t\t =\t\t\t\t0\r\n\r\n\t\t\t\t\t\twhile year < 2_0_0_1:\r\n\t\t\t\t\t\t\t\t\t\t\t\tday += 7\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tif (year % 4 == 0 and year % 1_0_0 != 0) or (year % 4_0_0 == 0):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif day > days_per_month[month - 1] and month != 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmonth += 1\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tday - days_per_month[month - 2]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif day > 2_9 and month == 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmonth += 1\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tday - 2_9\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif day > days_per_month[month - 1]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmonth += 1\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tday - days_per_month[month - 2]\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tif month > 1_2:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tyear += 1\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t1\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tif year < 2_0_0_1 and day == 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tsundays += 1\r\n\t\t\t\t\t\treturn sundays\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tprint(solution())\r\n\r\n\r\n\r\n"},"code_codestyle":{"kind":"number","value":713,"string":"713"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport pytest\r\nimport requests\r\n\r\nfrom datasets.utils.file_utils import http_head\r\n\r\nfrom .utils import OfflineSimulationMode, RequestWouldHangIndefinitelyError, offline\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.integration\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith offline(OfflineSimulationMode.CONNECTION_TIMES_OUT ):\r\n\t\t\t\t\t\t\t\t\t\t\t\twith 
pytest.raises(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\trequests.request(\"\"\"GET\"\"\" ,\"\"\"https://huggingface.co\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\twith pytest.raises(requests.exceptions.ConnectTimeout ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\trequests.request(\"\"\"GET\"\"\" ,\"\"\"https://huggingface.co\"\"\" ,timeout=1.0 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@pytest.mark.integration\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith offline(OfflineSimulationMode.CONNECTION_FAILS ):\r\n\t\t\t\t\t\t\t\t\t\t\t\twith pytest.raises(requests.exceptions.ConnectionError ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\trequests.request(\"\"\"GET\"\"\" ,\"\"\"https://huggingface.co\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith offline(OfflineSimulationMode.HF_DATASETS_OFFLINE_SET_TO_1 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\twith pytest.raises(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\thttp_head(\"\"\"https://huggingface.co\"\"\" )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305185,"cells":{"code":{"kind":"string","value":"import math\r\nimport sys\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif number != int(a__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"the value of input must be a natural number\"\"\" )\r\n\t\t\t\t\t\tif number < 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"the value of input must not be a negative number\"\"\" )\r\n\t\t\t\t\t\tif number == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn 1\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[-1] * (number + 1)\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t 
=\t\t\t\t0\r\n\t\t\t\t\t\tfor i in range(1 ,number + 1 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tsys.maxsize\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tint(math.sqrt(a__ ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor j in range(1 ,root + 1 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t1 + answers[i - (j**2)]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tmin(a__ ,a__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tanswer\r\n\t\t\t\t\t\treturn answers[number]\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\r\n"},"code_codestyle":{"kind":"number","value":714,"string":"714"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport json\r\nimport pathlib\r\nimport unittest\r\n\r\nimport numpy as np\r\n\r\nfrom transformers.testing_utils import require_torch, require_vision, slow\r\nfrom transformers.utils import is_torch_available, is_vision_available\r\n\r\nfrom ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs\r\n\r\n\r\nif is_torch_available():\r\n\t\timport torch\r\n\r\nif is_vision_available():\r\n\t\tfrom PIL import Image\r\n\r\n\t\tfrom transformers import DetrImageProcessor\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tunittest.TestCase ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=7\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=30\t\t\t\t\t\t, lowercase_=400\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=1 / 255\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=[0.5, 0.5, 0.5]\t\t\t\t\t\t, lowercase_=[0.5, 0.5, 0.5]\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\t# by setting size[\"longest_edge\"] > max_resolution we're effectively not testing this 
:p\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tsize if size is not None else {\"\"\"shortest_edge\"\"\": 18, \"\"\"longest_edge\"\"\": 1333}\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tparent\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tbatch_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnum_channels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tmin_resolution\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tmax_resolution\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdo_resize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tsize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tdo_rescale\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\trescale_factor\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdo_normalize\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\timage_mean\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\timage_std\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdo_pad\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn {\r\n\t\t\t\t\t\t\t\t\t \"do_resize\": self.do_resize,\r\n\t\t\t\t\t\t\t\t\t \"size\": self.size,\r\n\t\t\t\t\t\t\t\t\t \"do_rescale\": self.do_rescale,\r\n\t\t\t\t\t\t\t\t\t \"rescale_factor\": self.rescale_factor,\r\n\t\t\t\t\t\t\t\t\t \"do_normalize\": self.do_normalize,\r\n\t\t\t\t\t\t\t\t\t \"image_mean\": self.image_mean,\r\n\t\t\t\t\t\t\t\t\t \"image_std\": self.image_std,\r\n\t\t\t\t\t\t\t\t\t \"do_pad\": self.do_pad,\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=False ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tif not batched:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t 
=\t\t\t\timage_inputs[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif isinstance(lowercase_\t\t\t\t\t\t, Image.Image ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Dict\t\t\t\t =\t\t\t\timage.size\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\timage.shape[1], image.shape[2]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif w < h:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tint(self.size[\"\"\"shortest_edge\"\"\"] * h / w )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.size[\"\"\"shortest_edge\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif w > h:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.size[\"\"\"shortest_edge\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tint(self.size[\"\"\"shortest_edge\"\"\"] * w / h )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.size[\"\"\"shortest_edge\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.size[\"\"\"shortest_edge\"\"\"]\r\n\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : int\t\t\t\t =\t\t\t\tself.get_expected_values([image] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texpected_values.append((expected_height, expected_width) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tmax(lowercase_\t\t\t\t\t\t, key=lambda lowercase_ : item[0] )[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmax(lowercase_\t\t\t\t\t\t, key=lambda 
lowercase_ : item[1] )[1]\r\n\r\n\t\t\t\t\t\t\t\t\treturn expected_height, expected_width\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_torch\r\n@require_vision\r\nclass _a (\t\t\tsnake_case_ , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: Optional[Any] =\t\tDetrImageProcessor if is_vision_available() else None\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tDetrImageProcessingTester(self )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn self.image_processor_tester.prepare_image_processor_dict()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"image_mean\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"image_std\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"do_normalize\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"do_rescale\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"rescale_factor\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"do_resize\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"size\"\"\" ) )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(hasattr(lowercase_\t\t\t\t\t\t, \"\"\"do_pad\"\"\" ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.image_processing_class.from_dict(self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.size\t\t\t\t\t\t, {\"\"\"shortest_edge\"\"\": 
18, \"\"\"longest_edge\"\"\": 1333} )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.do_pad\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.image_processing_class.from_dict(\r\n\t\t\t\t\t\t\t\t\t self.image_processor_dict\t\t\t\t\t\t, size=42\t\t\t\t\t\t, max_size=84\t\t\t\t\t\t, pad_and_return_pixel_mask=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.size\t\t\t\t\t\t, {\"\"\"shortest_edge\"\"\": 42, \"\"\"longest_edge\"\"\": 84} )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(image_processor.do_pad\t\t\t\t\t\t, lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\t# Initialize image_processing\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\t# create random PIL images\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tprepare_image_inputs(self.image_processor_tester\t\t\t\t\t\t, equal_resolution=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(lowercase_\t\t\t\t\t\t, Image.Image )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test not batched input\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\timage_processing(image_inputs[0]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (1, self.image_processor_tester.num_channels, expected_height, expected_width)\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test batched\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : str\t\t\t\t 
=\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_\t\t\t\t\t\t, batched=lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\timage_processing(lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.batch_size,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t expected_height,\r\n\t\t\t\t\t\t\t\t\t expected_width,\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\t# Initialize image_processing\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\t# create random numpy tensors\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tprepare_image_inputs(self.image_processor_tester\t\t\t\t\t\t, equal_resolution=lowercase_\t\t\t\t\t\t, numpify=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(lowercase_\t\t\t\t\t\t, np.ndarray )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test not batched input\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\timage_processing(image_inputs[0]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (1, self.image_processor_tester.num_channels, expected_height, expected_width)\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test batched\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\timage_processing(lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" 
).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : int\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_\t\t\t\t\t\t, batched=lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.batch_size,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t expected_height,\r\n\t\t\t\t\t\t\t\t\t expected_width,\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\t# Initialize image_processing\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.image_processing_class(**self.image_processor_dict )\r\n\t\t\t\t\t\t\t\t\t# create random PyTorch tensors\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tprepare_image_inputs(self.image_processor_tester\t\t\t\t\t\t, equal_resolution=lowercase_\t\t\t\t\t\t, torchify=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tfor image in image_inputs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(lowercase_\t\t\t\t\t\t, torch.Tensor )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test not batched input\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\timage_processing(image_inputs[0]\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : str\t\t\t\t =\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (1, self.image_processor_tester.num_channels, expected_height, expected_width)\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t# Test batched\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\timage_processing(lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" ).pixel_values\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tself.image_processor_tester.get_expected_values(lowercase_\t\t\t\t\t\t, batched=lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t encoded_images.shape\t\t\t\t\t\t, (\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.batch_size,\r\n\t\t\t\t\t\t\t\t\t self.image_processor_tester.num_channels,\r\n\t\t\t\t\t\t\t\t\t expected_height,\r\n\t\t\t\t\t\t\t\t\t expected_width,\r\n\t\t\t\t\t\t\t\t\t )\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@slow\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\t# prepare image and target\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tImage.open(\"\"\"./tests/fixtures/tests_samples/COCO/000000039769.png\"\"\" )\r\n\t\t\t\t\t\t\t\t\twith open(\"\"\"./tests/fixtures/tests_samples/COCO/coco_annotations.txt\"\"\"\t\t\t\t\t\t, \"\"\"r\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tjson.loads(f.read() )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t{\"\"\"image_id\"\"\": 39769, \"\"\"annotations\"\"\": target}\r\n\r\n\t\t\t\t\t\t\t\t\t# encode them\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tDetrImageProcessor.from_pretrained(\"\"\"facebook/detr-resnet-50\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\timage_processing(images=lowercase_\t\t\t\t\t\t, annotations=lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t# verify pixel values\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.Size([1, 3, 800, 1066] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"pixel_values\"\"\"].shape\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.tensor([0.2_7_9_6, 0.3_1_3_8, 0.3_4_8_1] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"pixel_values\"\"\"][0, 0, 0, :3]\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, atol=1e-4 ) )\r\n\r\n\t\t\t\t\t\t\t\t\t# 
verify area\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttorch.tensor([5_8_8_7.9_6_0_0, 1_1_2_5_0.2_0_6_1, 4_8_9_3_5_3.8_4_3_8, 8_3_7_1_2_2.7_5_0_0, 1_4_7_9_6_7.5_1_5_6, 1_6_5_7_3_2.3_4_3_8] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"area\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify boxes\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttorch.Size([6, 4] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"labels\"\"\"][0][\"\"\"boxes\"\"\"].shape\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttorch.tensor([0.5_5_0_3, 0.2_7_6_5, 0.0_6_0_4, 0.2_2_1_5] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"boxes\"\"\"][0]\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, atol=1e-3 ) )\r\n\t\t\t\t\t\t\t\t\t# verify image_id\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttorch.tensor([39769] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"image_id\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify is_crowd\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.tensor([0, 0, 0, 0, 0, 0] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"iscrowd\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify class_labels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor([75, 75, 63, 65, 17, 17] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"class_labels\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify orig_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\ttorch.tensor([480, 640] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"orig_size\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify 
size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor([800, 1066] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"size\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@slow\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\t# prepare image, target and masks_path\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tImage.open(\"\"\"./tests/fixtures/tests_samples/COCO/000000039769.png\"\"\" )\r\n\t\t\t\t\t\t\t\t\twith open(\"\"\"./tests/fixtures/tests_samples/COCO/coco_panoptic_annotations.txt\"\"\"\t\t\t\t\t\t, \"\"\"r\"\"\" ) as f:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tjson.loads(f.read() )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{\"\"\"file_name\"\"\": \"\"\"000000039769.png\"\"\", \"\"\"image_id\"\"\": 39769, \"\"\"segments_info\"\"\": target}\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tpathlib.Path(\"\"\"./tests/fixtures/tests_samples/COCO/coco_panoptic\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t# encode them\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tDetrImageProcessor.from_pretrained(\"\"\"facebook/detr-resnet-50-panoptic\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\timage_processing(images=lowercase_\t\t\t\t\t\t, annotations=lowercase_\t\t\t\t\t\t, masks_path=lowercase_\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t# verify pixel values\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttorch.Size([1, 3, 800, 1066] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"pixel_values\"\"\"].shape\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor([0.2_7_9_6, 0.3_1_3_8, 0.3_4_8_1] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"pixel_values\"\"\"][0, 0, 0, :3]\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, atol=1e-4 ) 
)\r\n\r\n\t\t\t\t\t\t\t\t\t# verify area\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.tensor([1_4_7_9_7_9.6_8_7_5, 1_6_5_5_2_7.0_4_6_9, 4_8_4_6_3_8.5_9_3_8, 1_1_2_9_2.9_3_7_5, 5_8_7_9.6_5_6_2, 7_6_3_4.1_1_4_7] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"area\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify boxes\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttorch.Size([6, 4] )\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"labels\"\"\"][0][\"\"\"boxes\"\"\"].shape\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttorch.tensor([0.2_6_2_5, 0.5_4_3_7, 0.4_6_8_8, 0.8_6_2_5] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"boxes\"\"\"][0]\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, atol=1e-3 ) )\r\n\t\t\t\t\t\t\t\t\t# verify image_id\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttorch.tensor([39769] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"image_id\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify is_crowd\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttorch.tensor([0, 0, 0, 0, 0, 0] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"iscrowd\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify class_labels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttorch.tensor([17, 17, 63, 75, 75, 93] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"class_labels\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify masks\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t822873\r\n\t\t\t\t\t\t\t\t\tself.assertEqual(encoding[\"\"\"labels\"\"\"][0][\"\"\"masks\"\"\"].sum().item()\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\t# verify 
orig_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.tensor([480, 640] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"orig_size\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\t\t\t\t\t\t\t\t\t# verify size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttorch.tensor([800, 1066] )\r\n\t\t\t\t\t\t\t\t\tself.assertTrue(torch.allclose(encoding[\"\"\"labels\"\"\"][0][\"\"\"size\"\"\"]\t\t\t\t\t\t, lowercase_ ) )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305186,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tlen(UpperCamelCase__ ), len(grid[0] )\r\n\t\t\t\t\t\tif (\r\n\t\t\t\t\t\t min(UpperCamelCase__ ,UpperCamelCase__ ) < 0\r\n\t\t\t\t\t\t or row == row_length\r\n\t\t\t\t\t\t or col == col_length\r\n\t\t\t\t\t\t or (row, col) in visit\r\n\t\t\t\t\t\t or grid[row][col] == 1\r\n\t\t\t\t\t\t):\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn 0\r\n\t\t\t\t\t\tif row == row_length - 1 and col == col_length - 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn 1\r\n\r\n\t\t\t\t\t\tvisit.add((row, col) )\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\tcount += depth_first_search(UpperCamelCase__ ,row + 1 ,UpperCamelCase__ ,UpperCamelCase__ )\r\n\t\t\t\t\t\tcount += depth_first_search(UpperCamelCase__ ,row - 1 ,UpperCamelCase__ ,UpperCamelCase__ )\r\n\t\t\t\t\t\tcount += depth_first_search(UpperCamelCase__ ,UpperCamelCase__ ,col + 1 ,UpperCamelCase__ )\r\n\t\t\t\t\t\tcount += depth_first_search(UpperCamelCase__ ,UpperCamelCase__ ,col - 1 ,UpperCamelCase__ )\r\n\r\n\t\t\t\t\t\tvisit.remove((row, col) )\r\n\t\t\t\t\t\treturn 
count\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\t\t\t\t\t\timport doctest\r\n\r\n\t\t\t\t\t\t\tdoctest.testmod()\r\n\r\n"},"code_codestyle":{"kind":"number","value":715,"string":"715"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\twhile b > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif b & 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tres += a\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\ta += a\r\n\t\t\t\t\t\t\t\t\t\t\t\tb >>= 1\r\n\r\n\t\t\t\t\t\treturn res\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\twhile b > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif b & 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t((res % c) + (a % c)) % c\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\ta += a\r\n\t\t\t\t\t\t\t\t\t\t\t\tb >>= 1\r\n\r\n\t\t\t\t\t\treturn res\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305187,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import List, Optional, Union\r\n\r\nimport torch\r\n\r\nfrom ...models import UNetaDConditionModel, VQModel\r\nfrom ...pipelines import DiffusionPipeline\r\nfrom ...pipelines.pipeline_utils import ImagePipelineOutput\r\nfrom ...schedulers import DDPMScheduler\r\nfrom ...utils import (\r\n is_accelerate_available,\r\n is_accelerate_version,\r\n logging,\r\n randn_tensor,\r\n replace_example_docstring,\r\n)\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] =logging.get_logger(__name__) # pylint: 
disable=invalid-name\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str] ='''\n Examples:\n ```py\n >>> from diffusers import KandinskyV22Pipeline, KandinskyV22PriorPipeline\n >>> import torch\n\n >>> pipe_prior = KandinskyV22PriorPipeline.from_pretrained(\"kandinsky-community/kandinsky-2-2-prior\")\n >>> pipe_prior.to(\"cuda\")\n >>> prompt = \"red cat, 4k photo\"\n >>> out = pipe_prior(prompt)\n >>> image_emb = out.image_embeds\n >>> zero_image_emb = out.negative_image_embeds\n >>> pipe = KandinskyV22Pipeline.from_pretrained(\"kandinsky-community/kandinsky-2-2-decoder\")\n >>> pipe.to(\"cuda\")\n >>> image = pipe(\n ... image_embeds=image_emb,\n ... negative_image_embeds=zero_image_emb,\n ... height=768,\n ... width=768,\n ... num_inference_steps=50,\n ... ).images\n >>> image[0].save(\"cat.png\")\n ```\n'''\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=8 ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\theight // scale_factor**2\r\n if height % scale_factor**2 != 0:\r\n new_height += 1\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\twidth // scale_factor**2\r\n if width % scale_factor**2 != 0:\r\n new_width += 1\r\n return new_height * scale_factor, new_width * scale_factor\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\t__UpperCAmelCase ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n def __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tint:\r\n super().__init__()\r\n\r\n self.register_modules(\r\n unet=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, scheduler=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, movq=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, )\r\n lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t2 ** (len(self.movq.config.block_out_channels ) - 1)\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, 
lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tint:\r\n\r\n if latents is None:\r\n lowerCAmelCase : Any\t\t\t\t =\t\t\t\trandn_tensor(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, generator=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, device=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, dtype=__SCREAMING_SNAKE_CASE )\r\n else:\r\n if latents.shape != shape:\r\n raise ValueError(f\"\"\"Unexpected latents shape, got {latents.shape}, expected {shape}\"\"\" )\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tlatents.to(__SCREAMING_SNAKE_CASE )\r\n\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tlatents * scheduler.init_noise_sigma\r\n return latents\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0 ) ->\t\t\t\tTuple:\r\n\r\n if is_accelerate_available():\r\n from accelerate import cpu_offload\r\n else:\r\n raise ImportError(\"\"\"Please install accelerate via `pip install accelerate`\"\"\" )\r\n\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttorch.device(f\"\"\"cuda:{gpu_id}\"\"\" )\r\n\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[\r\n self.unet,\r\n self.movq,\r\n ]\r\n for cpu_offloaded_model in models:\r\n if cpu_offloaded_model is not None:\r\n cpu_offload(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE )\r\n\r\n\r\n\r\n\r\n\r\n\r\n def _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0 ) ->\t\t\t\tList[Any]:\r\n\r\n if is_accelerate_available() and is_accelerate_version(\"\"\">=\"\"\"\t\t\t\t\t\t, \"\"\"0.17.0.dev0\"\"\" ):\r\n from accelerate import cpu_offload_with_hook\r\n else:\r\n raise ImportError(\"\"\"`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.\"\"\" )\r\n\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttorch.device(f\"\"\"cuda:{gpu_id}\"\"\" )\r\n\r\n if self.device.type != \"cpu\":\r\n self.to(\"\"\"cpu\"\"\"\t\t\t\t\t\t, silence_dtype_warnings=__SCREAMING_SNAKE_CASE )\r\n torch.cuda.empty_cache() # otherwise we don't see the memory savings (but they probably 
exist)\r\n\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tNone\r\n for cpu_offloaded_model in [self.unet, self.movq]:\r\n lowerCAmelCase , lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tcpu_offload_with_hook(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE\t\t\t\t\t\t, prev_module_hook=__SCREAMING_SNAKE_CASE )\r\n\r\n # We'll offload the last model manually.\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\thook\r\n\r\n\r\n\r\n\r\n\r\n\r\n @property\r\n # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device\r\n def _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\r\n if not hasattr(self.unet\t\t\t\t\t\t, \"\"\"_hf_hook\"\"\" ):\r\n return self.device\r\n for module in self.unet.modules():\r\n if (\r\n hasattr(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, \"\"\"_hf_hook\"\"\" )\r\n and hasattr(module._hf_hook\t\t\t\t\t\t, \"\"\"execution_device\"\"\" )\r\n and module._hf_hook.execution_device is not None\r\n ):\r\n return torch.device(module._hf_hook.execution_device )\r\n return self.device\r\n\r\n\r\n\r\n\r\n\r\n\r\n @torch.no_grad()\r\n @replace_example_docstring(__SCREAMING_SNAKE_CASE )\r\n def __call__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = 512\t\t\t\t\t\t, lowercase_ = 512\t\t\t\t\t\t, lowercase_ = 100\t\t\t\t\t\t, lowercase_ = 4.0\t\t\t\t\t\t, lowercase_ = 1\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = None\t\t\t\t\t\t, lowercase_ = \"pil\"\t\t\t\t\t\t, lowercase_ = True\t\t\t\t\t\t, ) ->\t\t\t\tint:\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself._execution_device\r\n\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tguidance_scale > 1.0\r\n\r\n if isinstance(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE ):\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\ttorch.cat(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, dim=0 )\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\timage_embeds.shape[0] * num_images_per_prompt\r\n if 
isinstance(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE ):\r\n lowerCAmelCase : List[str]\t\t\t\t =\t\t\t\ttorch.cat(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, dim=0 )\r\n\r\n if do_classifier_free_guidance:\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\timage_embeds.repeat_interleave(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, dim=0 )\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tnegative_image_embeds.repeat_interleave(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, dim=0 )\r\n\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttorch.cat([negative_image_embeds, image_embeds]\t\t\t\t\t\t, dim=0 ).to(dtype=self.unet.dtype\t\t\t\t\t\t, device=__SCREAMING_SNAKE_CASE )\r\n\r\n self.scheduler.set_timesteps(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, device=__SCREAMING_SNAKE_CASE )\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.scheduler.timesteps\r\n\r\n lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.unet.config.in_channels\r\n\r\n lowerCAmelCase , lowerCAmelCase : Any\t\t\t\t =\t\t\t\tdownscale_height_and_width(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE\t\t\t\t\t\t, self.movq_scale_factor )\r\n\r\n # create initial latent\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.prepare_latents(\r\n (batch_size, num_channels_latents, height, width)\t\t\t\t\t\t, image_embeds.dtype\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE\t\t\t\t\t\t, self.scheduler\t\t\t\t\t\t, )\r\n\r\n for i, t in enumerate(self.progress_bar(__SCREAMING_SNAKE_CASE ) ):\r\n # expand the latents if we are doing classifier free guidance\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\ttorch.cat([latents] * 2 ) if do_classifier_free_guidance else latents\r\n\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\t{\"\"\"image_embeds\"\"\": image_embeds}\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\tself.unet(\r\n sample=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, timestep=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, encoder_hidden_states=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, 
added_cond_kwargs=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, return_dict=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, )[0]\r\n\r\n if do_classifier_free_guidance:\r\n lowerCAmelCase , lowerCAmelCase : Any\t\t\t\t =\t\t\t\tnoise_pred.split(latents.shape[1]\t\t\t\t\t\t, dim=1 )\r\n lowerCAmelCase , lowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tnoise_pred.chunk(2 )\r\n lowerCAmelCase , lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tvariance_pred.chunk(2 )\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnoise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttorch.cat([noise_pred, variance_pred_text]\t\t\t\t\t\t, dim=1 )\r\n\r\n if not (\r\n hasattr(self.scheduler.config\t\t\t\t\t\t, \"\"\"variance_type\"\"\" )\r\n and self.scheduler.config.variance_type in [\"learned\", \"learned_range\"]\r\n ):\r\n lowerCAmelCase , lowerCAmelCase : int\t\t\t\t =\t\t\t\tnoise_pred.split(latents.shape[1]\t\t\t\t\t\t, dim=1 )\r\n\r\n # compute the previous noisy sample x_t -> x_t-1\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.scheduler.step(\r\n __SCREAMING_SNAKE_CASE\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE\t\t\t\t\t\t, __SCREAMING_SNAKE_CASE\t\t\t\t\t\t, generator=__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, )[0]\r\n # post-processing\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.movq.decode(__SCREAMING_SNAKE_CASE\t\t\t\t\t\t, force_not_quantize=__SCREAMING_SNAKE_CASE )[\"\"\"sample\"\"\"]\r\n\r\n if output_type not in [\"pt\", \"np\", \"pil\"]:\r\n raise ValueError(f\"\"\"Only the output types `pt`, `pil` and `np` are supported not output_type={output_type}\"\"\" )\r\n\r\n if output_type in [\"np\", \"pil\"]:\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\timage * 0.5 + 0.5\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\timage.clamp(0\t\t\t\t\t\t, 1 )\r\n lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\timage.cpu().permute(0\t\t\t\t\t\t, 2\t\t\t\t\t\t, 3\t\t\t\t\t\t, 1 ).float().numpy()\r\n\r\n if output_type == \"pil\":\r\n lowerCAmelCase : 
Optional[int]\t\t\t\t =\t\t\t\tself.numpy_to_pil(__SCREAMING_SNAKE_CASE )\r\n\r\n if not return_dict:\r\n return (image,)\r\n\r\n return ImagePipelineOutput(images=__SCREAMING_SNAKE_CASE )\r\n\r\n"},"code_codestyle":{"kind":"number","value":716,"string":"716"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom math import factorial\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\treal\r\n\t\t\t\t\t\t\t\t\tif isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t[1] * rank\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\trank\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __repr__(\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (\r\n\t\t\t\t\t\t\t\t\t f\"\"\"{self.real}+\"\"\"\r\n\t\t\t\t\t\t\t\t\t f\"\"\"{'+'.join(str(lowercase_ )+'E'+str(n+1 )for n,dual in enumerate(self.duals ) )}\"\"\"\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.duals.copy()\r\n\t\t\t\t\t\t\t\t\twhile cur[-1] == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcur.pop(-1 )\r\n\t\t\t\t\t\t\t\t\treturn Dual(self.real\t\t\t\t\t\t, lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __add__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tif not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn Dual(self.real + other\t\t\t\t\t\t, self.duals )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself.duals.copy()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tother.duals.copy()\r\n\t\t\t\t\t\t\t\t\tif len(lowercase_ ) > len(lowercase_ 
):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\to_dual.extend([1] * (len(lowercase_ ) - len(lowercase_ )) )\r\n\t\t\t\t\t\t\t\t\telif len(lowercase_ ) < len(lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ts_dual.extend([1] * (len(lowercase_ ) - len(lowercase_ )) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\tfor i in range(len(lowercase_ ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals.append(s_dual[i] + o_dual[i] )\r\n\t\t\t\t\t\t\t\t\treturn Dual(self.real + other.real\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t_UpperCamelCase: List[Any] =\t\t__add__\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __sub__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\treturn self + other * -1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __mul__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tif not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor i in self.duals:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals.append(i * other )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn Dual(self.real * other\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[0] * (len(self.duals ) + len(other.duals ) + 1)\r\n\t\t\t\t\t\t\t\t\tfor i, item in enumerate(self.duals ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor j, jtem in enumerate(other.duals ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals[i + j + 1] += item * jtem\r\n\t\t\t\t\t\t\t\t\tfor k in range(len(self.duals ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals[k] += self.duals[k] * other.real\r\n\t\t\t\t\t\t\t\t\tfor index in range(len(other.duals ) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals[index] += other.duals[index] * self.real\r\n\t\t\t\t\t\t\t\t\treturn Dual(self.real * other.real\t\t\t\t\t\t, lowercase_ )\r\n\r\n\t\t\t_UpperCamelCase: str 
=\t\t__mul__\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __truediv__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tif not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor i in self.duals:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals.append(i / other )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn Dual(self.real / other\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\traise ValueError\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __floordiv__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tif not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor i in self.duals:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tnew_duals.append(i // other )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn Dual(self.real // other\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\traise ValueError\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __pow__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tif n < 0 or isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"power must be a positive integer\"\"\" )\r\n\t\t\t\t\t\t\t\t\tif n == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn 1\r\n\t\t\t\t\t\t\t\t\tif n == 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn self\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tself\r\n\t\t\t\t\t\t\t\t\tfor _ in range(n - 1 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tx *= self\r\n\t\t\t\t\t\t\t\t\treturn x\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif not callable(SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise 
ValueError(\"\"\"differentiate() requires a function as input for func\"\"\" )\r\n\t\t\t\t\t\tif not isinstance(SCREAMING_SNAKE_CASE__ ,(float, int) ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"differentiate() requires a float as input for position\"\"\" )\r\n\t\t\t\t\t\tif not isinstance(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"differentiate() requires an int as input for order\"\"\" )\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tDual(SCREAMING_SNAKE_CASE__ ,1 )\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tfunc(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\tif order == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn result.real\r\n\t\t\t\t\t\treturn result.duals[order - 1] * factorial(SCREAMING_SNAKE_CASE__ )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\tdef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\t\t\treturn y**2 * y**4\r\n\r\n\t\tprint(differentiate(f, 9, 2))\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305188,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t0\r\n\t\t\t\t\t\tfor ch in input_str:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tord(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tpow(2 ,SCREAMING_SNAKE_CASE__ )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t# If we already turned on bit for current character's unicode\r\n\t\t\t\t\t\t\t\t\t\t\t\tif bitmap >> ch_unicode & 1 == 1:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn 
False\r\n\t\t\t\t\t\t\t\t\t\t\t\tbitmap |= ch_bit_index_on\r\n\t\t\t\t\t\treturn True\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\r\n"},"code_codestyle":{"kind":"number","value":717,"string":"717"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ..utils import DummyObject, requires_backends\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tmetaclass=snake_case_ ):\r\n\t\t\t_UpperCamelCase: List[Any] =\t\t[\"keras_nlp\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, *lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\trequires_backends(self\t\t\t\t\t\t, [\"\"\"keras_nlp\"\"\"] )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305189,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport dataclasses\r\nimport json\r\nimport sys\r\nimport types\r\nfrom argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, ArgumentTypeError\r\nfrom copy import copy\r\nfrom enum import Enum\r\nfrom inspect import isclass\r\nfrom pathlib import Path\r\nfrom typing import Any, Callable, Dict, Iterable, List, Literal, NewType, Optional, Tuple, Union, get_type_hints\r\n\r\nimport yaml\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =NewType('DataClass', Any)\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =NewType('DataClassType', Any)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif isinstance(_lowercase ,_lowercase ):\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn v\r\n\t\t\t\t\t\tif v.lower() in (\"yes\", \"true\", \"t\", \"y\", \"1\"):\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn True\r\n\t\t\t\t\t\telif v.lower() in (\"no\", \"false\", \"f\", \"n\", \"0\"):\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn False\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\traise 
ArgumentTypeError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t F\"\"\"Truthy value expected: got {v} but expected one of yes/no, true/false, t/f, y/n, 1/0 (case insensitive).\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t{str(_lowercase ): choice for choice in choices}\r\n\t\t\t\t\t\treturn lambda SCREAMING_SNAKE_CASE__ : str_to_choice.get(_lowercase ,_lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t*,\r\n SCREAMING_SNAKE_CASE__ = None ,SCREAMING_SNAKE_CASE__ = None ,SCREAMING_SNAKE_CASE__ = dataclasses.MISSING ,SCREAMING_SNAKE_CASE__ = dataclasses.MISSING ,SCREAMING_SNAKE_CASE__ = None ,**SCREAMING_SNAKE_CASE__ ,):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif metadata is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Important, don't use as default param in function signature because dict is mutable and shared across function calls\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t{}\r\n\t\t\t\t\t\tif aliases is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\taliases\r\n\t\t\t\t\t\tif help is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\thelp\r\n\r\n\t\t\t\t\t\treturn dataclasses.field(metadata=_lowercase ,default=_lowercase ,default_factory=_lowercase ,**_lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: List[str] =\t\t42\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\t# To make the default appear when using --help\r\n\t\t\t\t\t\t\t\t\tif \"formatter_class\" not in kwargs:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t 
=\t\t\t\tArgumentDefaultsHelpFormatter\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(**__A )\r\n\t\t\t\t\t\t\t\t\tif dataclasses.is_dataclass(__A ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[dataclass_types]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tlist(__A )\r\n\t\t\t\t\t\t\t\t\tfor dtype in self.dataclass_types:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself._add_dataclass_arguments(__A )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@staticmethod\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tlowercase_\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tf\"\"\"--{field.name}\"\"\"\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tfield.metadata.copy()\r\n\t\t\t\t\t\t\t\t\t# field.metadata is not used at all by Data Classes,\r\n\t\t\t\t\t\t\t\t\t# it is provided as a third-party extension mechanism.\r\n\t\t\t\t\t\t\t\t\tif isinstance(field.type\t\t\t\t\t\t, __A ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise RuntimeError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"Unresolved type detected, which should have been done with the help of \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"`typing.get_type_hints` method by default\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tkwargs.pop(\"\"\"aliases\"\"\"\t\t\t\t\t\t, [] )\r\n\t\t\t\t\t\t\t\t\tif isinstance(__A\t\t\t\t\t\t, __A ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[aliases]\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tgetattr(field.type\t\t\t\t\t\t, \"\"\"__origin__\"\"\"\t\t\t\t\t\t, field.type )\r\n\t\t\t\t\t\t\t\t\tif origin_type is Union or (hasattr(__A\t\t\t\t\t\t, \"\"\"UnionType\"\"\" ) and isinstance(__A\t\t\t\t\t\t, types.UnionType )):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif str not in field.type.__args__ and (\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t len(field.type.__args__ ) != 2 or type(__A ) not in 
field.type.__args__\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"Only `Union[X, NoneType]` (i.e., `Optional[X]`) is allowed for `Union` because\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\" the argument parser only supports one type per argument.\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t f\"\"\" Problem encountered in field '{field.name}'.\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif type(__A ) not in field.type.__args__:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# filter `str` in Union\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tfield.type.__args__[0] if field.type.__args__[1] == str else field.type.__args__[1]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tgetattr(field.type\t\t\t\t\t\t, \"\"\"__origin__\"\"\"\t\t\t\t\t\t, field.type )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif bool not in field.type.__args__:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# filter `NoneType` in Union (except for `Union[bool, NoneType]`)\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t field.type.__args__[0] if isinstance(__A\t\t\t\t\t\t, field.type.__args__[1] ) else field.type.__args__[1]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tgetattr(field.type\t\t\t\t\t\t, \"\"\"__origin__\"\"\"\t\t\t\t\t\t, field.type )\r\n\r\n # A variable to store kwargs for a boolean field, if needed\r\n # so that we can init a `no_*` complement argument (see below)\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{}\r\n\t\t\t\t\t\t\t\t\tif origin_type is Literal or (isinstance(field.type\t\t\t\t\t\t, __A ) and issubclass(field.type\t\t\t\t\t\t, __A )):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif origin_type is 
Literal:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tfield.type.__args__\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[x.value for x in field.type]\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tmake_choice_type_function(kwargs[\"\"\"choices\"\"\"] )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif field.default is not dataclasses.MISSING:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tfield.default\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tTrue\r\n\t\t\t\t\t\t\t\t\telif field.type is bool or field.type == Optional[bool]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Copy the currect kwargs to use to instantiate a `no_*` complement argument below.\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# We do not initialize it here because the `no_*` alternative must be instantiated after the real argument\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tcopy(__A )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Hack because type=bool in argparse does not behave as we want.\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tstring_to_bool\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif field.type is bool or (field.default is not None and field.default is not dataclasses.MISSING):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Default value is False if we have no default when of type bool.\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tFalse if field.default is dataclasses.MISSING else field.default\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# This is the value that will get picked if we don't include --field_name in any way\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tdefault\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# This tells 
argparse we accept 0 or 1 value after --field_name\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t\"\"\"?\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# This is the value that will get picked if we do --field_name (without value)\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tTrue\r\n\t\t\t\t\t\t\t\t\telif isclass(__A ) and issubclass(__A\t\t\t\t\t\t, __A ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tfield.type.__args__[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t\"\"\"+\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif field.default_factory is not dataclasses.MISSING:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tfield.default_factory()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif field.default is dataclasses.MISSING:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tTrue\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tfield.type\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif field.default is not dataclasses.MISSING:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tfield.default\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif field.default_factory is not dataclasses.MISSING:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tfield.default_factory()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tTrue\r\n\t\t\t\t\t\t\t\t\tparser.add_argument(__A\t\t\t\t\t\t, *__A\t\t\t\t\t\t, **__A )\r\n\r\n\t\t\t\t\t\t\t\t\t# Add a complement `no_*` argument for a boolean field AFTER the initial field has already been added.\r\n\t\t\t\t\t\t\t\t\t# Order is important for arguments with the same destination!\r\n\t\t\t\t\t\t\t\t\t# We use a copy of earlier kwargs because the original kwargs have 
changed a lot before reaching down\r\n\t\t\t\t\t\t\t\t\t# here and we do not need those changes/additional keys.\r\n\t\t\t\t\t\t\t\t\tif field.default is True and (field.type is bool or field.type == Optional[bool]):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tFalse\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tparser.add_argument(f\"\"\"--no_{field.name}\"\"\"\t\t\t\t\t\t, action=\"\"\"store_false\"\"\"\t\t\t\t\t\t, dest=field.name\t\t\t\t\t\t, **__A )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tif hasattr(__A\t\t\t\t\t\t, \"\"\"_argument_group_name\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.add_argument_group(dtype._argument_group_name )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself\r\n\r\n\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tget_type_hints(__A )\r\n\t\t\t\t\t\t\t\t\texcept NameError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise RuntimeError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t f\"\"\"Type resolution failed for {dtype}. 
Try declaring the class in global scope or \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"removing line of `from __future__ import annotations` which opts in Postponed \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"Evaluation of Annotations (PEP 563)\"\"\" )\r\n\t\t\t\t\t\t\t\t\texcept TypeError as ex:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Remove this block when we drop Python 3.9 support\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif sys.version_info[:2] < (3, 10) and \"unsupported operand type(s) for |\" in str(__A ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t\"\"\".\"\"\".join(map(__A\t\t\t\t\t\t, sys.version_info[:3] ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise RuntimeError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t f\"\"\"Type resolution failed for {dtype} on Python {python_version}. Try removing \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"line of `from __future__ import annotations` which opts in union types as \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"`X | Y` (PEP 604) via Postponed Evaluation of Annotations (PEP 563). 
To \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"support Python versions that lower than 3.10, you need to use \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"`typing.Union[X, Y]` instead of `X | Y` and `typing.Optional[X]` instead of \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"`X | None`.\"\"\" ) from ex\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise\r\n\r\n\t\t\t\t\t\t\t\t\tfor field in dataclasses.fields(__A ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif not field.init:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tcontinue\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttype_hints[field.name]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself._parse_dataclass_field(__A\t\t\t\t\t\t, __A )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, ) ->\t\t\t\tTuple[DataClass, ...]:\r\n\r\n\t\t\t\t\t\t\t\t\tif args_file_flag or args_filename or (look_for_args_file and len(sys.argv )):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t[]\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif args_filename:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\targs_files.append(Path(__A ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telif look_for_args_file and len(sys.argv ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\targs_files.append(Path(sys.argv[0] ).with_suffix(\"\"\".args\"\"\" ) )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# args files specified via command line flag should overwrite default args files so we add them last\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif args_file_flag:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Create special parser just to extract the args_file_flag values\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tArgumentParser()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\targs_file_parser.add_argument(__A\t\t\t\t\t\t, 
type=__A\t\t\t\t\t\t, action=\"\"\"append\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Use only remaining args for further parsing (remove the args_file_flag)\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\targs_file_parser.parse_known_args(args=__A )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tvars(__A ).get(args_file_flag.lstrip(\"\"\"-\"\"\" )\t\t\t\t\t\t, __A )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif cmd_args_file_paths:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\targs_files.extend([Path(__A ) for p in cmd_args_file_paths] )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor args_file in args_files:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif args_file.exists():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfile_args += args_file.read_text().split()\r\n\r\n # in case of duplicate arguments the last one has precedence\r\n # args specified via the command line should overwrite args from files, so we add them last\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tfile_args + args if args is not None else file_args + sys.argv[1:]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tself.parse_known_args(args=__A )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\tfor dtype in self.dataclass_types:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{f.name for f in dataclasses.fields(__A ) if f.init}\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t{k: v for k, v in vars(__A ).items() if k in keys}\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor k in keys:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tdelattr(__A\t\t\t\t\t\t, __A )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdtype(**__A 
)\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\toutputs.append(__A )\r\n\t\t\t\t\t\t\t\t\tif len(namespace.__dict__ ) > 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# additional namespace.\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\toutputs.append(__A )\r\n\t\t\t\t\t\t\t\t\tif return_remaining_strings:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn (*outputs, remaining_args)\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif remaining_args:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(f\"\"\"Some specified arguments are not used by the HfArgumentParser: {remaining_args}\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn (*outputs,)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = False ) ->\t\t\t\tTuple[DataClass, ...]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tset(args.keys() )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[]\r\n\t\t\t\t\t\t\t\t\tfor dtype in self.dataclass_types:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t{f.name for f in dataclasses.fields(__A ) if f.init}\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t{k: v for k, v in args.items() if k in keys}\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tunused_keys.difference_update(inputs.keys() )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tdtype(**__A )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\toutputs.append(__A )\r\n\t\t\t\t\t\t\t\t\tif not allow_extra_keys and unused_keys:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(f\"\"\"Some keys are not used by the HfArgumentParser: {sorted(__A )}\"\"\" )\r\n\t\t\t\t\t\t\t\t\treturn tuple(__A )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = False ) ->\t\t\t\tTuple[DataClass, ...]:\r\n\t\t\t\t\t\t\t\t\twith open(Path(__A )\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as 
open_json_file:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tjson.loads(open_json_file.read() )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tself.parse_dict(__A\t\t\t\t\t\t, allow_extra_keys=__A )\r\n\t\t\t\t\t\t\t\t\treturn tuple(__A )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_ = False ) ->\t\t\t\tTuple[DataClass, ...]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.parse_dict(yaml.safe_load(Path(__A ).read_text() )\t\t\t\t\t\t, allow_extra_keys=__A )\r\n\t\t\t\t\t\t\t\t\treturn tuple(__A )\r\n\r\n"},"code_codestyle":{"kind":"number","value":718,"string":"718"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ...utils import (\r\n OptionalDependencyNotAvailable,\r\n is_torch_available,\r\n is_transformers_available,\r\n is_transformers_version,\r\n)\r\n\r\n\r\ntry:\r\n\t\tif not (is_transformers_available() and is_torch_available() and is_transformers_version('>=', '4.25.0')):\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tfrom ...utils.dummy_torch_and_transformers_objects import UnCLIPImageVariationPipeline, UnCLIPPipeline\r\nelse:\r\n\t\tfrom .pipeline_unclip import UnCLIPPipeline\r\n\t\tfrom .pipeline_unclip_image_variation import UnCLIPImageVariationPipeline\r\n\t\tfrom .text_proj import UnCLIPTextProjModel\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305190,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport json\r\nimport os\r\nimport unittest\r\n\r\nfrom transformers import BatchEncoding, LEDTokenizer, LEDTokenizerFast\r\nfrom transformers.models.led.tokenization_led import VOCAB_FILES_NAMES\r\nfrom transformers.testing_utils import require_tokenizers, require_torch\r\nfrom transformers.utils import cached_property\r\n\r\nfrom 
...test_tokenization_common import TokenizerTesterMixin\r\n\r\n\r\n\r\n\r\n\r\n\r\n@require_tokenizers\r\nclass _a (\t\t\t__snake_case , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: Tuple =\t\tLEDTokenizer\r\n\t\t\t_UpperCamelCase: str =\t\tLEDTokenizerFast\r\n\t\t\t_UpperCamelCase: Union[str, Any] =\t\tTrue\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tsuper().setUp()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\t[\r\n\t\t\t\t\t\t\t\t\t \"\"\"l\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"o\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"w\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"e\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"r\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"s\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"t\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"i\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"d\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"n\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"\\u0120\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"\\u0120l\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"\\u0120n\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"\\u0120lo\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"\\u0120low\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"er\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"\\u0120lowest\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"\\u0120newer\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"\\u0120wider\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"\"\"\",\r\n\t\t\t\t\t\t\t\t\t]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tdict(zip(_lowercase\t\t\t\t\t\t, range(len(_lowercase ) ) ) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t[\"\"\"#version: 0.2\"\"\", \"\"\"\\u0120 l\"\"\", \"\"\"\\u0120l o\"\"\", \"\"\"\\u0120lo w\"\"\", \"\"\"e r\"\"\", \"\"\"\"\"\"]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t{\"\"\"unk_token\"\"\": \"\"\"\"\"\"}\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tos.path.join(self.tmpdirname\t\t\t\t\t\t, VOCAB_FILES_NAMES[\"\"\"vocab_file\"\"\"] )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t 
=\t\t\t\tos.path.join(self.tmpdirname\t\t\t\t\t\t, VOCAB_FILES_NAMES[\"\"\"merges_file\"\"\"] )\r\n\t\t\t\t\t\t\t\t\twith open(self.vocab_file\t\t\t\t\t\t, \"\"\"w\"\"\"\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as fp:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfp.write(json.dumps(_lowercase ) + \"\"\"\\n\"\"\" )\r\n\t\t\t\t\t\t\t\t\twith open(self.merges_file\t\t\t\t\t\t, \"\"\"w\"\"\"\t\t\t\t\t\t, encoding=\"\"\"utf-8\"\"\" ) as fp:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfp.write(\"\"\"\\n\"\"\".join(_lowercase ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tkwargs.update(self.special_tokens_map )\r\n\t\t\t\t\t\t\t\t\treturn self.tokenizer_class.from_pretrained(self.tmpdirname\t\t\t\t\t\t, **_lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tkwargs.update(self.special_tokens_map )\r\n\t\t\t\t\t\t\t\t\treturn self.rust_tokenizer_class.from_pretrained(self.tmpdirname\t\t\t\t\t\t, **_lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\treturn \"lower newer\", \"lower newer\"\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@cached_property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn LEDTokenizer.from_pretrained(\"\"\"allenai/led-base-16384\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@cached_property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn LEDTokenizerFast.from_pretrained(\"\"\"allenai/led-base-16384\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t[\"\"\"A long paragraph for summarization.\"\"\", \"\"\"Another paragraph for 
summarization.\"\"\"]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t[0, 250, 251, 17818, 13, 39186, 1938, 4, 2]\r\n\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttokenizer(_lowercase\t\t\t\t\t\t, max_length=len(_lowercase )\t\t\t\t\t\t, padding=_lowercase\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(_lowercase\t\t\t\t\t\t, _lowercase )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual((2, 9)\t\t\t\t\t\t, batch.input_ids.shape )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual((2, 9)\t\t\t\t\t\t, batch.attention_mask.shape )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tbatch.input_ids.tolist()[0]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertListEqual(_lowercase\t\t\t\t\t\t, _lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t[\"\"\"A long paragraph for summarization.\"\"\", \"\"\"Another paragraph for summarization.\"\"\"]\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttokenizer(_lowercase\t\t\t\t\t\t, padding=_lowercase\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIn(\"\"\"input_ids\"\"\"\t\t\t\t\t\t, _lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIn(\"\"\"attention_mask\"\"\"\t\t\t\t\t\t, _lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertNotIn(\"\"\"labels\"\"\"\t\t\t\t\t\t, _lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertNotIn(\"\"\"decoder_attention_mask\"\"\"\t\t\t\t\t\t, _lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[\r\n\t\t\t\t\t\t\t\t\t \"\"\"Summary of the text.\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"Another summary.\"\"\",\r\n\t\t\t\t\t\t\t\t\t]\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttokenizer(text_target=_lowercase\t\t\t\t\t\t, max_length=32\t\t\t\t\t\t, padding=\"\"\"max_length\"\"\"\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(32\t\t\t\t\t\t, targets[\"\"\"input_ids\"\"\"].shape[1] )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\ttokenizer(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t [\"\"\"I am a small frog\"\"\" * 1024, \"\"\"I am a small frog\"\"\"]\t\t\t\t\t\t, padding=_lowercase\t\t\t\t\t\t, truncation=_lowercase\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertIsInstance(_lowercase\t\t\t\t\t\t, _lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(batch.input_ids.shape\t\t\t\t\t\t, (2, 5122) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t[\"\"\"A long paragraph for summarization.\"\"\"]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[\r\n\t\t\t\t\t\t\t\t\t \"\"\"Summary of the text.\"\"\",\r\n\t\t\t\t\t\t\t\t\t]\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\ttokenizer(_lowercase\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t 
=\t\t\t\ttokenizer(text_target=_lowercase\t\t\t\t\t\t, return_tensors=\"\"\"pt\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tinputs[\"\"\"input_ids\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttargets[\"\"\"input_ids\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue((input_ids[:, 0] == tokenizer.bos_token_id).all().item() )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue((labels[:, 0] == tokenizer.bos_token_id).all().item() )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue((input_ids[:, -1] == tokenizer.eos_token_id).all().item() )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertTrue((labels[:, -1] == tokenizer.eos_token_id).all().item() )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@require_torch\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\r\n\t\t\t\t\t\t\t\t\tfor tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t[\"\"\"Summary of the text.\"\"\", \"\"\"Another summary.\"\"\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t[[0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, -1, -1]]\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttokenizer(_lowercase\t\t\t\t\t\t, padding=_lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t[[0] * len(_lowercase ) for x in encoded_output[\"\"\"input_ids\"\"\"]]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\ttokenizer.pad(_lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertSequenceEqual(outputs[\"\"\"global_attention_mask\"\"\"]\t\t\t\t\t\t, _lowercase )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\r\n\t\t\t\t\t\t\t\t\tfor tokenizer, pretrained_name, 
kwargs in self.tokenizers_list:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith self.subTest(f\"\"\"{tokenizer.__class__.__name__} ({pretrained_name})\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.rust_tokenizer_class.from_pretrained(_lowercase\t\t\t\t\t\t, **_lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.tokenizer_class.from_pretrained(_lowercase\t\t\t\t\t\t, **_lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t\"\"\"A, AllenNLP sentence.\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttokenizer_r.encode_plus(_lowercase\t\t\t\t\t\t, add_special_tokens=_lowercase\t\t\t\t\t\t, return_token_type_ids=_lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttokenizer_p.encode_plus(_lowercase\t\t\t\t\t\t, add_special_tokens=_lowercase\t\t\t\t\t\t, return_token_type_ids=_lowercase )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(sum(tokens_r[\"\"\"token_type_ids\"\"\"] )\t\t\t\t\t\t, sum(tokens_p[\"\"\"token_type_ids\"\"\"] ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertEqual(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t sum(tokens_r[\"\"\"attention_mask\"\"\"] ) / len(tokens_r[\"\"\"attention_mask\"\"\"] )\t\t\t\t\t\t, sum(tokens_p[\"\"\"attention_mask\"\"\"] ) / len(tokens_p[\"\"\"attention_mask\"\"\"] )\t\t\t\t\t\t, )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\ttokenizer_r.convert_ids_to_tokens(tokens_r[\"\"\"input_ids\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttokenizer_p.convert_ids_to_tokens(tokens_p[\"\"\"input_ids\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertSequenceEqual(tokens_p[\"\"\"input_ids\"\"\"]\t\t\t\t\t\t, [0, 250, 6, 50264, 3823, 487, 21992, 3645, 4, 2] 
)\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertSequenceEqual(tokens_r[\"\"\"input_ids\"\"\"]\t\t\t\t\t\t, [0, 250, 6, 50264, 3823, 487, 21992, 3645, 4, 2] )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertSequenceEqual(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t _lowercase\t\t\t\t\t\t, [\"\"\"\"\"\", \"\"\"A\"\"\", \"\"\",\"\"\", \"\"\"\"\"\", \"\"\"ĠAllen\"\"\", \"\"\"N\"\"\", \"\"\"LP\"\"\", \"\"\"Ġsentence\"\"\", \"\"\".\"\"\", \"\"\"\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tself.assertSequenceEqual(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t _lowercase\t\t\t\t\t\t, [\"\"\"\"\"\", \"\"\"A\"\"\", \"\"\",\"\"\", \"\"\"\"\"\", \"\"\"ĠAllen\"\"\", \"\"\"N\"\"\", \"\"\"LP\"\"\", \"\"\"Ġsentence\"\"\", \"\"\".\"\"\", \"\"\"\"\"\"] )\r\n\r\n"},"code_codestyle":{"kind":"number","value":719,"string":"719"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif p < 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"p should not be less than 2!\"\"\" )\r\n\t\t\t\t\t\telif p == 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn True\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t4\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t(1 << p) - 1\r\n\t\t\t\t\t\tfor _ in range(p - 2 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t((s * s) - 2) % m\r\n\t\t\t\t\t\treturn s == 0\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\tprint(lucas_lehmer_test(7))\r\n\t\tprint(lucas_lehmer_test(11))\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305191,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n return not 
any(\r\n neighbour == 1 and colored_vertices[i] == color\r\n for i, neighbour in enumerate(lowercase_ ) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if index == len(lowercase_ ):\r\n return True\r\n\r\n # Recursive Step\r\n for i in range(lowercase_ ):\r\n if valid_coloring(graph[index] ,lowercase_ ,lowercase_ ):\r\n # Color current vertex\r\n lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ti\r\n # Validate coloring\r\n if util_color(lowercase_ ,lowercase_ ,lowercase_ ,index + 1 ):\r\n return True\r\n # Backtrack\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\t-1\r\n return False\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n lowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[-1] * len(lowercase_ )\r\n\r\n if util_color(lowercase_ ,lowercase_ ,lowercase_ ,0 ):\r\n return colored_vertices\r\n\r\n return []\r\n\r\n"},"code_codestyle":{"kind":"number","value":720,"string":"720"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport random\r\nimport unittest\r\n\r\nimport torch\r\n\r\nfrom diffusers import IFImgaImgSuperResolutionPipeline\r\nfrom diffusers.utils import floats_tensor\r\nfrom diffusers.utils.import_utils import is_xformers_available\r\nfrom diffusers.utils.testing_utils import skip_mps, torch_device\r\n\r\nfrom ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS\r\nfrom ..test_pipelines_common import PipelineTesterMixin\r\nfrom . 
import IFPipelineTesterMixin\r\n\r\n\r\n\r\n\r\n\r\n\r\n@skip_mps\r\nclass _a (\t\t\tsnake_case_ , snake_case_ , unittest.TestCase ):\r\n\t\t\t_UpperCamelCase: Optional[Any] =\t\tIFImgaImgSuperResolutionPipeline\r\n\t\t\t_UpperCamelCase: int =\t\tTEXT_GUIDED_IMAGE_VARIATION_PARAMS - {\"width\", \"height\"}\r\n\t\t\t_UpperCamelCase: Optional[int] =\t\tTEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS.union({\"original_image\"} )\r\n\t\t\t_UpperCamelCase: List[str] =\t\tPipelineTesterMixin.required_optional_params - {\"latents\"}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self._get_superresolution_dummy_components()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_\t\t\t\t\t\t, lowercase_=0 ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tif str(lowercase_ ).startswith(\"\"\"mps\"\"\" ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\ttorch.manual_seed(lowercase_ )\r\n\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ttorch.Generator(device=lowercase_ ).manual_seed(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tfloats_tensor((1, 3, 32, 32)\t\t\t\t\t\t, rng=random.Random(lowercase_ ) ).to(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tfloats_tensor((1, 3, 16, 16)\t\t\t\t\t\t, rng=random.Random(lowercase_ ) ).to(lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t \"\"\"prompt\"\"\": \"\"\"A painting of a squirrel eating a burger\"\"\",\r\n\t\t\t\t\t\t\t\t\t \"\"\"image\"\"\": image,\r\n\t\t\t\t\t\t\t\t\t \"\"\"original_image\"\"\": original_image,\r\n\t\t\t\t\t\t\t\t\t \"\"\"generator\"\"\": generator,\r\n\t\t\t\t\t\t\t\t\t \"\"\"num_inference_steps\"\"\": 2,\r\n\t\t\t\t\t\t\t\t\t \"\"\"output_type\"\"\": 
\"\"\"numpy\"\"\",\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\t\t\treturn inputs\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skipIf(\r\n\t\t\t torch_device != \"\"\"cuda\"\"\" or not is_xformers_available()\t\t\t\t\t\t, reason=\"\"\"XFormers attention is only available with CUDA and `xformers` installed\"\"\"\t\t\t\t\t\t, )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tself._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tself._test_save_load_optional_components()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@unittest.skipIf(torch_device != \"\"\"cuda\"\"\"\t\t\t\t\t\t, reason=\"\"\"float16 requires CUDA\"\"\" )\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\t# Due to non-determinism in save load of the hf-internal-testing/tiny-random-t5 text encoder\r\n\t\t\t\t\t\t\t\t\tsuper().test_save_load_floataa(expected_max_diff=1e-1 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\tself._test_attention_slicing_forward_pass(expected_max_diff=1e-2 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\tself._test_save_load_local()\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tself._test_inference_batch_single_identical(\r\n\t\t\t\t\t\t\t\t\t expected_max_diff=1e-2\t\t\t\t\t\t, )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305192,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom math import pi, sqrt, tan\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if side_length < 0:\r\n raise ValueError(\"\"\"surface_area_cube() 
only accepts non-negative values\"\"\" )\r\n return 6 * side_length**2\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if length < 0 or breadth < 0 or height < 0:\r\n raise ValueError(\"\"\"surface_area_cuboid() only accepts non-negative values\"\"\" )\r\n return 2 * ((length * breadth) + (breadth * height) + (length * height))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if radius < 0:\r\n raise ValueError(\"\"\"surface_area_sphere() only accepts non-negative values\"\"\" )\r\n return 4 * pi * radius**2\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if radius < 0:\r\n raise ValueError(\"\"\"surface_area_hemisphere() only accepts non-negative values\"\"\" )\r\n return 3 * pi * radius**2\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if radius < 0 or height < 0:\r\n raise ValueError(\"\"\"surface_area_cone() only accepts non-negative values\"\"\" )\r\n return pi * radius * (radius + (height**2 + radius**2) ** 0.5)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if radius_a < 0 or radius_a < 0 or height < 0:\r\n raise ValueError(\r\n \"\"\"surface_area_conical_frustum() only accepts non-negative values\"\"\" )\r\n lowerCAmelCase : int\t\t\t\t =\t\t\t\t(height**2 + (radius_a - radius_a) ** 2) ** 0.5\r\n return pi * ((slant_height * (radius_a + radius_a)) + radius_a**2 + radius_a**2)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase 
(\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if radius < 0 or height < 0:\r\n raise ValueError(\"\"\"surface_area_cylinder() only accepts non-negative values\"\"\" )\r\n return 2 * pi * radius * (height + radius)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if torus_radius < 0 or tube_radius < 0:\r\n raise ValueError(\"\"\"surface_area_torus() only accepts non-negative values\"\"\" )\r\n if torus_radius < tube_radius:\r\n raise ValueError(\r\n \"\"\"surface_area_torus() does not support spindle or self intersecting tori\"\"\" )\r\n return 4 * pow(lowerCAmelCase__ ,2 ) * torus_radius * tube_radius\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if length < 0 or width < 0:\r\n raise ValueError(\"\"\"area_rectangle() only accepts non-negative values\"\"\" )\r\n return length * width\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if side_length < 0:\r\n raise ValueError(\"\"\"area_square() only accepts non-negative values\"\"\" )\r\n return side_length**2\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if base < 0 or height < 0:\r\n raise ValueError(\"\"\"area_triangle() only accepts non-negative values\"\"\" )\r\n return (base * height) / 2\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if sidea < 0 or sidea < 0 or sidea < 0:\r\n raise ValueError(\"\"\"area_triangle_three_sides() only 
accepts non-negative values\"\"\" )\r\n elif sidea + sidea < sidea or sidea + sidea < sidea or sidea + sidea < sidea:\r\n raise ValueError(\"\"\"Given three sides do not form a triangle\"\"\" )\r\n lowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\t(sidea + sidea + sidea) / 2\r\n lowerCAmelCase : str\t\t\t\t =\t\t\t\tsqrt(\r\n semi_perimeter\r\n * (semi_perimeter - sidea)\r\n * (semi_perimeter - sidea)\r\n * (semi_perimeter - sidea) )\r\n return area\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if base < 0 or height < 0:\r\n raise ValueError(\"\"\"area_parallelogram() only accepts non-negative values\"\"\" )\r\n return base * height\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if basea < 0 or basea < 0 or height < 0:\r\n raise ValueError(\"\"\"area_trapezium() only accepts non-negative values\"\"\" )\r\n return 1 / 2 * (basea + basea) * height\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if radius < 0:\r\n raise ValueError(\"\"\"area_circle() only accepts non-negative values\"\"\" )\r\n return pi * radius**2\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if radius_x < 0 or radius_y < 0:\r\n raise ValueError(\"\"\"area_ellipse() only accepts non-negative values\"\"\" )\r\n return pi * radius_x * radius_y\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if diagonal_a < 0 or diagonal_a < 0:\r\n raise ValueError(\"\"\"area_rhombus() only accepts non-negative 
values\"\"\" )\r\n return 1 / 2 * diagonal_a * diagonal_a\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n '''simple docstring'''\r\n\r\n\r\n\r\n\r\n if not isinstance(lowerCAmelCase__ ,lowerCAmelCase__ ) or sides < 3:\r\n raise ValueError(\r\n \"\"\"area_reg_polygon() only accepts integers greater than or \\\nequal to three as number of sides\"\"\" )\r\n elif length < 0:\r\n raise ValueError(\r\n \"\"\"area_reg_polygon() only accepts non-negative values as \\\nlength of a side\"\"\" )\r\n return (sides * length**2) / (4 * tan(pi / sides ))\r\n return (sides * length**2) / (4 * tan(pi / sides ))\r\n\r\n\r\nif __name__ == \"__main__\":\r\n import doctest\r\n\r\n doctest.testmod(verbose=True) # verbose so we can see methods missing tests\r\n\r\n print('[DEMO] Areas of various geometric shapes: \\n')\r\n print(F'''Rectangle: {area_rectangle(10, 20) = }''')\r\n print(F'''Square: {area_square(10) = }''')\r\n print(F'''Triangle: {area_triangle(10, 10) = }''')\r\n print(F'''Triangle: {area_triangle_three_sides(5, 12, 13) = }''')\r\n print(F'''Parallelogram: {area_parallelogram(10, 20) = }''')\r\n print(F'''Rhombus: {area_rhombus(10, 20) = }''')\r\n print(F'''Trapezium: {area_trapezium(10, 20, 30) = }''')\r\n print(F'''Circle: {area_circle(20) = }''')\r\n print(F'''Ellipse: {area_ellipse(10, 20) = }''')\r\n print('\\nSurface Areas of various geometric shapes: \\n')\r\n print(F'''Cube: {surface_area_cube(20) = }''')\r\n print(F'''Cuboid: {surface_area_cuboid(10, 20, 30) = }''')\r\n print(F'''Sphere: {surface_area_sphere(20) = }''')\r\n print(F'''Hemisphere: {surface_area_hemisphere(20) = }''')\r\n print(F'''Cone: {surface_area_cone(10, 20) = }''')\r\n print(F'''Conical Frustum: {surface_area_conical_frustum(10, 20, 30) = }''')\r\n print(F'''Cylinder: {surface_area_cylinder(10, 20) = }''')\r\n print(F'''Torus: {surface_area_torus(20, 10) = }''')\r\n print(F'''Equilateral Triangle: 
{area_reg_polygon(3, 10) = }''')\r\n print(F'''Square: {area_reg_polygon(4, 10) = }''')\r\n print(F'''Reqular Pentagon: {area_reg_polygon(5, 10) = }''')\r\n\r\n"},"code_codestyle":{"kind":"number","value":721,"string":"721"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...utils import logging\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: Tuple =\t\t\"llama\"\r\n\t\t\t_UpperCamelCase: List[str] =\t\t[\"past_key_values\"]\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=32000\t\t\t\t\t\t, lowercase_=4096\t\t\t\t\t\t, lowercase_=11008\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=\"silu\"\t\t\t\t\t\t, lowercase_=2048\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-6\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=1\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tvocab_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tmax_position_embeddings\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\thidden_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tintermediate_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tnum_hidden_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_attention_heads\r\n\r\n\t\t\t\t\t\t\t\t\t# for backward compatibility\r\n\t\t\t\t\t\t\t\t\tif num_key_value_heads is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t 
=\t\t\t\tnum_attention_heads\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_key_value_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\thidden_act\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tinitializer_range\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\trms_norm_eps\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tpretraining_tp\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tuse_cache\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\trope_scaling\r\n\t\t\t\t\t\t\t\t\tself._rope_scaling_validation()\r\n\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(\r\n\t\t\t\t\t\t\t\t\t pad_token_id=lowercase_\t\t\t\t\t\t, bos_token_id=lowercase_\t\t\t\t\t\t, eos_token_id=lowercase_\t\t\t\t\t\t, tie_word_embeddings=lowercase_\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\tif self.rope_scaling is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\r\n\t\t\t\t\t\t\t\t\tif not isinstance(self.rope_scaling\t\t\t\t\t\t, lowercase_ ) or len(self.rope_scaling ) != 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t \"\"\"`rope_scaling` must be a dictionary with with two fields, `name` and `factor`, \"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t f\"\"\"got {self.rope_scaling}\"\"\" )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tself.rope_scaling.get(\"\"\"type\"\"\"\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.rope_scaling.get(\"\"\"factor\"\"\"\t\t\t\t\t\t, lowercase_ )\r\n\t\t\t\t\t\t\t\t\tif rope_scaling_type is None or rope_scaling_type not in [\"linear\", \"dynamic\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t f\"\"\"`rope_scaling`'s name field must be one of ['linear', 'dynamic'], got {rope_scaling_type}\"\"\" )\r\n\t\t\t\t\t\t\t\t\tif 
rope_scaling_factor is None or not isinstance(lowercase_\t\t\t\t\t\t, lowercase_ ) or rope_scaling_factor <= 1.0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(f\"\"\"`rope_scaling`'s factor field must be an float > 1, got {rope_scaling_factor}\"\"\" )\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305193,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\n'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\r\nfrom __future__ import annotations\r\n\r\nfrom decimal import Decimal\r\n\r\nfrom numpy import array\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tDecimal\r\n\r\n\t\t\t\t\t\t# Check if the provided matrix has 2 rows and 2 columns\r\n\t\t\t\t\t\t# since this implementation only works for 2x2 matrices\r\n\t\t\t\t\t\tif len(_lowerCamelCase ) == 2 and len(matrix[0] ) == 2 and len(matrix[1] ) == 2:\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Calculate the determinant of the matrix\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tfloat(\r\n\t\t\t\t\t\t\t\t\t\t\t\t d(matrix[0][0] ) * d(matrix[1][1] ) - d(matrix[1][0] ) * d(matrix[0][1] ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\tif determinant == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"This matrix has no inverse.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Creates a copy of the matrix with swapped positions of the elements\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t[[0.0, 0.0], [0.0, 0.0]]\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Dict\t\t\t\t =\t\t\t\tmatrix[1][1], matrix[0][0]\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\t-matrix[1][0], -matrix[0][1]\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Calculate the inverse of the 
matrix\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn [\r\n\t\t\t\t\t\t\t\t\t\t\t\t [(float(d(_lowerCamelCase ) ) / determinant) or 0.0 for n in row] for row in swapped_matrix\r\n\t\t\t\t\t\t\t\t\t\t\t\t]\r\n\t\t\t\t\t\telif (\r\n\t\t\t\t\t\t len(_lowerCamelCase ) == 3\r\n\t\t\t\t\t\t and len(matrix[0] ) == 3\r\n\t\t\t\t\t\t and len(matrix[1] ) == 3\r\n\t\t\t\t\t\t and len(matrix[2] ) == 3\r\n\t\t\t\t\t\t):\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Calculate the determinant of the matrix using Sarrus rule\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tfloat(\r\n\t\t\t\t\t\t\t\t\t\t\t\t (\r\n\t\t\t\t\t\t\t\t\t\t\t\t (d(matrix[0][0] ) * d(matrix[1][1] ) * d(matrix[2][2] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t + (d(matrix[0][1] ) * d(matrix[1][2] ) * d(matrix[2][0] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t + (d(matrix[0][2] ) * d(matrix[1][0] ) * d(matrix[2][1] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t )\r\n\t\t\t\t\t\t\t\t\t\t\t\t - (\r\n\t\t\t\t\t\t\t\t\t\t\t\t (d(matrix[0][2] ) * d(matrix[1][1] ) * d(matrix[2][0] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t + (d(matrix[0][1] ) * d(matrix[1][0] ) * d(matrix[2][2] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t + (d(matrix[0][0] ) * d(matrix[1][2] ) * d(matrix[2][1] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\tif determinant == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise ValueError(\"\"\"This matrix has no inverse.\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Creating cofactor matrix\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[\r\n\t\t\t\t\t\t\t\t\t\t\t\t [d(0.0 ), d(0.0 ), d(0.0 )],\r\n\t\t\t\t\t\t\t\t\t\t\t\t [d(0.0 ), d(0.0 ), d(0.0 )],\r\n\t\t\t\t\t\t\t\t\t\t\t\t [d(0.0 ), d(0.0 ), d(0.0 )],\r\n\t\t\t\t\t\t\t\t\t\t\t\t]\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\t(d(matrix[1][1] ) * d(matrix[2][2] )) - (\r\n\t\t\t\t\t\t\t\t\t\t\t\t d(matrix[1][2] ) * d(matrix[2][1] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t-(\r\n\t\t\t\t\t\t\t\t\t\t\t\t (d(matrix[1][0] ) * 
d(matrix[2][2] )) - (d(matrix[1][2] ) * d(matrix[2][0] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t(d(matrix[1][0] ) * d(matrix[2][1] )) - (\r\n\t\t\t\t\t\t\t\t\t\t\t\t d(matrix[1][1] ) * d(matrix[2][0] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t-(\r\n\t\t\t\t\t\t\t\t\t\t\t\t (d(matrix[0][1] ) * d(matrix[2][2] )) - (d(matrix[0][2] ) * d(matrix[2][1] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t(d(matrix[0][0] ) * d(matrix[2][2] )) - (\r\n\t\t\t\t\t\t\t\t\t\t\t\t d(matrix[0][2] ) * d(matrix[2][0] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t-(\r\n\t\t\t\t\t\t\t\t\t\t\t\t (d(matrix[0][0] ) * d(matrix[2][1] )) - (d(matrix[0][1] ) * d(matrix[2][0] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t(d(matrix[0][1] ) * d(matrix[1][2] )) - (\r\n\t\t\t\t\t\t\t\t\t\t\t\t d(matrix[0][2] ) * d(matrix[1][1] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\t-(\r\n\t\t\t\t\t\t\t\t\t\t\t\t (d(matrix[0][0] ) * d(matrix[1][2] )) - (d(matrix[0][2] ) * d(matrix[1][0] ))\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t(d(matrix[0][0] ) * d(matrix[1][1] )) - (\r\n\t\t\t\t\t\t\t\t\t\t\t\t d(matrix[0][1] ) * d(matrix[1][0] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Transpose the cofactor matrix (Adjoint matrix)\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tarray(_lowerCamelCase )\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor i in range(3 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor j in range(3 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tcofactor_matrix[j][i]\r\n\r\n # Inverse of the matrix using the formula (1/determinant) * adjoint 
matrix\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tarray(_lowerCamelCase )\r\n\t\t\t\t\t\t\t\t\t\t\t\tfor i in range(3 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tfor j in range(3 ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tinverse_matrix[i][j] /= d(_lowerCamelCase )\r\n\r\n # Calculate the inverse of the matrix\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn [[float(d(_lowerCamelCase ) ) or 0.0 for n in row] for row in inverse_matrix]\r\n\t\t\t\t\t\traise ValueError(\"\"\"Please provide a matrix of size 2x2 or 3x3.\"\"\" )\r\n\r\n"},"code_codestyle":{"kind":"number","value":700,"string":"700"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom collections import OrderedDict\r\nfrom typing import Mapping\r\n\r\nfrom packaging import version\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...onnx import OnnxConfig\r\nfrom ...utils import logging\r\nfrom ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={\r\n 'microsoft/swin-tiny-patch4-window7-224': (\r\n 'https://huggingface.co/microsoft/swin-tiny-patch4-window7-224/resolve/main/config.json'\r\n ),\r\n # See all Swin models at https://huggingface.co/models?filter=swin\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ , snake_case_ ):\r\n\t\t\t_UpperCamelCase: int =\t\t\"swin\"\r\n\r\n\t\t\t_UpperCamelCase: str =\t\t{\r\n\t\t\t \"num_attention_heads\": \"num_heads\",\r\n\t\t\t \"num_hidden_layers\": \"num_layers\",\r\n\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=224\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=3\t\t\t\t\t\t, lowercase_=96\t\t\t\t\t\t, lowercase_=[2, 2, 6, 2]\t\t\t\t\t\t, lowercase_=[3, 6, 12, 24]\t\t\t\t\t\t, lowercase_=7\t\t\t\t\t\t, lowercase_=4.0\t\t\t\t\t\t, lowercase_=True\t\t\t\t\t\t, 
lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.0\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, lowercase_=\"gelu\"\t\t\t\t\t\t, lowercase_=False\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-5\t\t\t\t\t\t, lowercase_=32\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(**lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\timage_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tpatch_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tnum_channels\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tembed_dim\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tdepths\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tlen(lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnum_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\twindow_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tmlp_ratio\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tqkv_bias\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\thidden_dropout_prob\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tattention_probs_dropout_prob\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tdrop_path_rate\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\thidden_act\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tuse_absolute_embeddings\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tlayer_norm_eps\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tinitializer_range\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tencoder_stride\r\n\t\t\t\t\t\t\t\t\t# we set the hidden_size attribute in order to make Swin work with VisionEncoderDecoderModel\r\n\t\t\t\t\t\t\t\t\t# this indicates the channel dimension after the last stage of the 
model\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tint(embed_dim * 2 ** (len(lowercase_ ) - 1) )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t[\"\"\"stem\"\"\"] + [f\"\"\"stage{idx}\"\"\" for idx in range(1\t\t\t\t\t\t, len(lowercase_ ) + 1 )]\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase , lowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tget_aligned_output_features_output_indices(\r\n\t\t\t\t\t\t\t\t\t out_features=lowercase_\t\t\t\t\t\t, out_indices=lowercase_\t\t\t\t\t\t, stage_names=self.stage_names )\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: int =\t\tversion.parse(\"1.11\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tMapping[str, Mapping[int, str]]:\r\n\t\t\t\t\t\t\t\t\treturn OrderedDict(\r\n\t\t\t\t\t\t\t\t\t [\r\n\t\t\t\t\t\t\t\t\t (\"\"\"pixel_values\"\"\", {0: \"\"\"batch\"\"\", 1: \"\"\"num_channels\"\"\", 2: \"\"\"height\"\"\", 3: \"\"\"width\"\"\"}),\r\n\t\t\t\t\t\t\t\t\t ] )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\t@property\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tfloat:\r\n\t\t\t\t\t\t\t\t\treturn 1e-4\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305194,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom __future__ import annotations\r\n\r\nfrom typing import Any\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ = 6 ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\t\t\t\tself.create_linked_list(lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNode()\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase 
: int\t\t\t\t =\t\t\t\tcurrent_node\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tcurrent_node\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tcurrent_node\r\n\t\t\t\t\t\t\t\t\tfor _ in range(1\t\t\t\t\t\t, lowercase_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tNode()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tcurrent_node\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tprevious_node\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tcurrent_node\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tself.front\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tprevious_node\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (\r\n\t\t\t\t\t\t\t\t\t self.front == self.rear\r\n\t\t\t\t\t\t\t\t\t and self.front is not None\r\n\t\t\t\t\t\t\t\t\t and self.front.data is None\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tself.check_can_perform_operation()\r\n\t\t\t\t\t\t\t\t\treturn self.front.data if self.front else None\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_ ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tif self.rear is None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\r\n\t\t\t\t\t\t\t\t\tself.check_is_full()\r\n\t\t\t\t\t\t\t\t\tif not self.is_empty():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.rear.next\r\n\t\t\t\t\t\t\t\t\tif self.rear:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tdata\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\tself.check_can_perform_operation()\r\n\t\t\t\t\t\t\t\t\tif self.rear is None or self.front is 
None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn None\r\n\t\t\t\t\t\t\t\t\tif self.front == self.rear:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tself.front.data\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn data\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tself.front\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\told_front.next\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\told_front.data\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\t\t\t\treturn data\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tif self.is_empty():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise Exception(\"\"\"Empty Queue\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tif self.rear and self.rear.next == self.front:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\traise Exception(\"\"\"Full Queue\"\"\" )\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tNone\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\r\n"},"code_codestyle":{"kind":"number","value":701,"string":"701"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr ={\r\n 'Pillow': 'Pillow<10.0.0',\r\n 'accelerate': 'accelerate>=0.20.3',\r\n 'av': 'av==9.2.0',\r\n 'beautifulsoup4': 'beautifulsoup4',\r\n 'black': 'black~=23.1',\r\n 'codecarbon': 'codecarbon==1.2.0',\r\n 'cookiecutter': 'cookiecutter==1.7.3',\r\n 'dataclasses': 'dataclasses',\r\n 
'datasets': 'datasets!=2.5.0',\r\n 'decord': 'decord==0.6.0',\r\n 'deepspeed': 'deepspeed>=0.9.3',\r\n 'diffusers': 'diffusers',\r\n 'dill': 'dill<0.3.5',\r\n 'evaluate': 'evaluate>=0.2.0',\r\n 'fairscale': 'fairscale>0.3',\r\n 'faiss-cpu': 'faiss-cpu',\r\n 'fastapi': 'fastapi',\r\n 'filelock': 'filelock',\r\n 'flax': 'flax>=0.4.1,<=0.7.0',\r\n 'ftfy': 'ftfy',\r\n 'fugashi': 'fugashi>=1.0',\r\n 'GitPython': 'GitPython<3.1.19',\r\n 'hf-doc-builder': 'hf-doc-builder>=0.3.0',\r\n 'huggingface-hub': 'huggingface-hub>=0.14.1,<1.0',\r\n 'importlib_metadata': 'importlib_metadata',\r\n 'ipadic': 'ipadic>=1.0.0,<2.0',\r\n 'isort': 'isort>=5.5.4',\r\n 'jax': 'jax>=0.2.8,!=0.3.2,<=0.4.13',\r\n 'jaxlib': 'jaxlib>=0.1.65,<=0.4.13',\r\n 'jieba': 'jieba',\r\n 'kenlm': 'kenlm',\r\n 'keras-nlp': 'keras-nlp>=0.3.1',\r\n 'librosa': 'librosa',\r\n 'nltk': 'nltk',\r\n 'natten': 'natten>=0.14.6',\r\n 'numpy': 'numpy>=1.17',\r\n 'onnxconverter-common': 'onnxconverter-common',\r\n 'onnxruntime-tools': 'onnxruntime-tools>=1.4.2',\r\n 'onnxruntime': 'onnxruntime>=1.4.0',\r\n 'opencv-python': 'opencv-python',\r\n 'optuna': 'optuna',\r\n 'optax': 'optax>=0.0.8,<=0.1.4',\r\n 'packaging': 'packaging>=20.0',\r\n 'parameterized': 'parameterized',\r\n 'phonemizer': 'phonemizer',\r\n 'protobuf': 'protobuf',\r\n 'psutil': 'psutil',\r\n 'pyyaml': 'pyyaml>=5.1',\r\n 'pydantic': 'pydantic<2',\r\n 'pytest': 'pytest>=7.2.0',\r\n 'pytest-timeout': 'pytest-timeout',\r\n 'pytest-xdist': 'pytest-xdist',\r\n 'python': 'python>=3.8.0',\r\n 'ray[tune]': 'ray[tune]',\r\n 'regex': 'regex!=2019.12.17',\r\n 'requests': 'requests',\r\n 'rhoknp': 'rhoknp>=1.1.0,<1.3.1',\r\n 'rjieba': 'rjieba',\r\n 'rouge-score': 'rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1',\r\n 'ruff': 'ruff>=0.0.241,<=0.0.259',\r\n 'sacrebleu': 'sacrebleu>=1.4.12,<2.0.0',\r\n 'sacremoses': 'sacremoses',\r\n 'safetensors': 'safetensors>=0.3.1',\r\n 'sagemaker': 'sagemaker>=2.31.0',\r\n 'scikit-learn': 'scikit-learn',\r\n 'sentencepiece': 
'sentencepiece>=0.1.91,!=0.1.92',\r\n 'sigopt': 'sigopt',\r\n 'starlette': 'starlette',\r\n 'sudachipy': 'sudachipy>=0.6.6',\r\n 'sudachidict_core': 'sudachidict_core>=20220729',\r\n 'tensorflow-cpu': 'tensorflow-cpu>=2.6,<2.14',\r\n 'tensorflow': 'tensorflow>=2.6,<2.14',\r\n 'tensorflow-text': 'tensorflow-text<2.14',\r\n 'tf2onnx': 'tf2onnx',\r\n 'timeout-decorator': 'timeout-decorator',\r\n 'timm': 'timm',\r\n 'tokenizers': 'tokenizers>=0.11.1,!=0.11.3,<0.14',\r\n 'torch': 'torch>=1.9,!=1.12.0',\r\n 'torchaudio': 'torchaudio',\r\n 'torchvision': 'torchvision',\r\n 'pyctcdecode': 'pyctcdecode>=0.4.0',\r\n 'tqdm': 'tqdm>=4.27',\r\n 'unidic': 'unidic>=1.0.2',\r\n 'unidic_lite': 'unidic_lite>=1.0.7',\r\n 'urllib3': 'urllib3<2.0.0',\r\n 'uvicorn': 'uvicorn',\r\n}\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305195,"cells":{"code":{"kind":"string","value":"\r\n\r\nfrom collections.abc import Callable\r\n\r\nimport numpy as np\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tint(np.ceil((x_end - xa) / step_size ) )\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tnp.zeros((n + 1,) )\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tya\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\txa\r\n\r\n\t\t\t\t\t\tfor k in range(SCREAMING_SNAKE_CASE_ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ty[k] + step_size * ode_func(SCREAMING_SNAKE_CASE_ ,y[k] )\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\ty[k] + (\r\n\t\t\t\t\t\t\t\t\t\t\t\t (step_size / 2) * (ode_func(SCREAMING_SNAKE_CASE_ ,y[k] ) + ode_func(x + step_size ,SCREAMING_SNAKE_CASE_ 
))\r\n\t\t\t\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\t\t\t\tx += step_size\r\n\r\n\t\t\t\t\t\treturn y\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\r\n"},"code_codestyle":{"kind":"number","value":702,"string":"702"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import TYPE_CHECKING\r\n\r\nfrom ...utils import (\r\n OptionalDependencyNotAvailable,\r\n _LazyModule,\r\n is_flax_available,\r\n is_tf_available,\r\n is_tokenizers_available,\r\n is_torch_available,\r\n)\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] ={\r\n 'configuration_roformer': ['ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'RoFormerConfig', 'RoFormerOnnxConfig'],\r\n 'tokenization_roformer': ['RoFormerTokenizer'],\r\n}\r\n\r\ntry:\r\n\t\tif not is_tokenizers_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =['RoFormerTokenizerFast']\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =[\r\n\t\t 'ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'RoFormerForCausalLM',\r\n\t\t 'RoFormerForMaskedLM',\r\n\t\t 'RoFormerForMultipleChoice',\r\n\t\t 'RoFormerForQuestionAnswering',\r\n\t\t 'RoFormerForSequenceClassification',\r\n\t\t 'RoFormerForTokenClassification',\r\n\t\t 'RoFormerLayer',\r\n\t\t 'RoFormerModel',\r\n\t\t 'RoFormerPreTrainedModel',\r\n\t\t 'load_tf_weights_in_roformer',\r\n\t\t]\r\n\r\n\r\ntry:\r\n\t\tif not is_tf_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =[\r\n\t\t 'TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'TFRoFormerForCausalLM',\r\n\t\t 'TFRoFormerForMaskedLM',\r\n\t\t 
'TFRoFormerForMultipleChoice',\r\n\t\t 'TFRoFormerForQuestionAnswering',\r\n\t\t 'TFRoFormerForSequenceClassification',\r\n\t\t 'TFRoFormerForTokenClassification',\r\n\t\t 'TFRoFormerLayer',\r\n\t\t 'TFRoFormerModel',\r\n\t\t 'TFRoFormerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\ntry:\r\n\t\tif not is_flax_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =[\r\n\t\t 'FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'FlaxRoFormerForMaskedLM',\r\n\t\t 'FlaxRoFormerForMultipleChoice',\r\n\t\t 'FlaxRoFormerForQuestionAnswering',\r\n\t\t 'FlaxRoFormerForSequenceClassification',\r\n\t\t 'FlaxRoFormerForTokenClassification',\r\n\t\t 'FlaxRoFormerModel',\r\n\t\t 'FlaxRoFormerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, RoFormerConfig, RoFormerOnnxConfig\r\n\t\tfrom .tokenization_roformer import RoFormerTokenizer\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_tokenizers_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .tokenization_roformer_fast import RoFormerTokenizerFast\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_roformer import (\r\n\t\t\t\t ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t RoFormerForCausalLM,\r\n\t\t\t\t RoFormerForMaskedLM,\r\n\t\t\t\t RoFormerForMultipleChoice,\r\n\t\t\t\t RoFormerForQuestionAnswering,\r\n\t\t\t\t RoFormerForSequenceClassification,\r\n\t\t\t\t RoFormerForTokenClassification,\r\n\t\t\t\t RoFormerLayer,\r\n\t\t\t\t RoFormerModel,\r\n\t\t\t\t RoFormerPreTrainedModel,\r\n\t\t\t\t load_tf_weights_in_roformer,\r\n\t\t\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not 
is_tf_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_tf_roformer import (\r\n\t\t\t\t TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t TFRoFormerForCausalLM,\r\n\t\t\t\t TFRoFormerForMaskedLM,\r\n\t\t\t\t TFRoFormerForMultipleChoice,\r\n\t\t\t\t TFRoFormerForQuestionAnswering,\r\n\t\t\t\t TFRoFormerForSequenceClassification,\r\n\t\t\t\t TFRoFormerForTokenClassification,\r\n\t\t\t\t TFRoFormerLayer,\r\n\t\t\t\t TFRoFormerModel,\r\n\t\t\t\t TFRoFormerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_flax_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_flax_roformer import (\r\n\t\t\t\t FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t FlaxRoFormerForMaskedLM,\r\n\t\t\t\t FlaxRoFormerForMultipleChoice,\r\n\t\t\t\t FlaxRoFormerForQuestionAnswering,\r\n\t\t\t\t FlaxRoFormerForSequenceClassification,\r\n\t\t\t\t FlaxRoFormerForTokenClassification,\r\n\t\t\t\t FlaxRoFormerModel,\r\n\t\t\t\t FlaxRoFormerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =_LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305196,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom ...configuration_utils import PretrainedConfig\r\nfrom ...utils import logging\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint =logging.get_logger(__name__)\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] ={\r\n 'SCUT-DLVCLab/lilt-roberta-en-base': (\r\n 'https://huggingface.co/SCUT-DLVCLab/lilt-roberta-en-base/resolve/main/config.json'\r\n ),\r\n}\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a 
(\t\t\tsnake_case_ ):\r\n\t\t\t_UpperCamelCase: List[str] =\t\t\"lilt\"\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=30522\t\t\t\t\t\t, lowercase_=768\t\t\t\t\t\t, lowercase_=12\t\t\t\t\t\t, lowercase_=12\t\t\t\t\t\t, lowercase_=3072\t\t\t\t\t\t, lowercase_=\"gelu\"\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, lowercase_=0.1\t\t\t\t\t\t, lowercase_=512\t\t\t\t\t\t, lowercase_=2\t\t\t\t\t\t, lowercase_=0.0_2\t\t\t\t\t\t, lowercase_=1e-12\t\t\t\t\t\t, lowercase_=0\t\t\t\t\t\t, lowercase_=\"absolute\"\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=4\t\t\t\t\t\t, lowercase_=1024\t\t\t\t\t\t, **lowercase_\t\t\t\t\t\t, ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tsuper().__init__(pad_token_id=lowercase_\t\t\t\t\t\t, **lowercase_ )\r\n\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tvocab_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\thidden_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tnum_hidden_layers\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tnum_attention_heads\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\thidden_act\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tintermediate_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\thidden_dropout_prob\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tattention_probs_dropout_prob\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tmax_position_embeddings\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\ttype_vocab_size\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tinitializer_range\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tlayer_norm_eps\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tposition_embedding_type\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t 
=\t\t\t\tclassifier_dropout\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tchannel_shrink_ratio\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmax_ad_position_embeddings\r\n\r\n"},"code_codestyle":{"kind":"number","value":703,"string":"703"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\treturn int(input_a == input_a == 0 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tprint(\"\"\"Truth Table of NOR Gate:\"\"\" )\r\n\t\t\t\t\t\tprint(\"\"\"| Input 1 | Input 2 | Output |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 0 | 0 | {nor_gate(0 ,0 )} |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 0 | 1 | {nor_gate(0 ,1 )} |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 1 | 0 | {nor_gate(1 ,0 )} |\"\"\" )\r\n\t\t\t\t\t\tprint(F\"\"\"| 1 | 1 | {nor_gate(1 ,1 )} |\"\"\" )\r\n\r\n\r\nif __name__ == \"__main__\":\r\n\t\timport doctest\r\n\r\n\t\tdoctest.testmod()\r\n\t\tmain()\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305197,"cells":{"code":{"kind":"string","value":"\r\r\r\rdef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\r\r\t\t\t\t\t\t'''simple docstring'''\r\r\r\r\r\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tlen(__lowercase )\r\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tsum(__lowercase )\r\r\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\t[[False for x in range(s + 1 )] for y in range(n + 1 )]\r\r\t\t\t\t\t\tfor i in range(1 ,n + 1 ):\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tTrue\r\r\t\t\t\t\t\tfor i in range(1 ,s + 1 ):\r\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tFalse\r\r\t\t\t\t\t\tfor i in range(1 ,n + 
1 ):\r\t\t\t\t\t\t\t\t\t\t\t\tfor j in range(1 ,s + 1 ):\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tdp[i][j - 1]\r\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif arr[i - 1] <= j:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tdp[i][j] or dp[i - 1][j - arr[i - 1]]\r\r\t\t\t\t\t\tfor j in range(int(s / 2 ) ,-1 ,-1 ):\r\t\t\t\t\t\t\t\t\t\t\t\tif dp[n][j] is True:\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\ts - 2 * j\r\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tbreak\r\r\t\t\t\t\t\treturn diff\r\r"},"code_codestyle":{"kind":"number","value":704,"string":"704"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nfrom typing import TYPE_CHECKING\r\n\r\nfrom ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint ={\r\n 'configuration_poolformer': [\r\n 'POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP',\r\n 'PoolFormerConfig',\r\n 'PoolFormerOnnxConfig',\r\n ]\r\n}\r\n\r\ntry:\r\n\t\tif not is_vision_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] =['PoolFormerFeatureExtractor']\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[str] =['PoolFormerImageProcessor']\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =[\r\n\t\t 'POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'PoolFormerForImageClassification',\r\n\t\t 'PoolFormerModel',\r\n\t\t 'PoolFormerPreTrainedModel',\r\n\t\t]\r\n\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_poolformer import (\r\n\t\t POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,\r\n\t\t PoolFormerConfig,\r\n\t\t PoolFormerOnnxConfig,\r\n\t\t)\r\n\r\n\t\ttry:\r\n\t\t\t\tif 
not is_vision_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .feature_extraction_poolformer import PoolFormerFeatureExtractor\r\n\t\t\t\tfrom .image_processing_poolformer import PoolFormerImageProcessor\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_poolformer import (\r\n\t\t\t\t POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t PoolFormerForImageClassification,\r\n\t\t\t\t PoolFormerModel,\r\n\t\t\t\t PoolFormerPreTrainedModel,\r\n\t\t\t\t)\r\n\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tAny =_LazyModule(__name__, globals()['__file__'], _import_structure)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305198,"cells":{"code":{"kind":"string","value":"# Copyright 2023 The HuggingFace Team. 
All rights reserved.\r\n#\r\n# Licensed under the Apache License, Version 2.0 (the \"License\");\r\n# you may not use this file except in compliance with the License.\r\n# You may obtain a copy of the License at\r\n#\r\n# http://www.apache.org/licenses/LICENSE-2.0\r\n#\r\n# Unless required by applicable law or agreed to in writing, software\r\n# distributed under the License is distributed on an \"AS IS\" BASIS,\r\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n# See the License for the specific language governing permissions and\r\n# limitations under the License.\r\n\r\nfrom typing import TYPE_CHECKING\r\n\r\nfrom ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint ={\r\n 'configuration_xmod': [\r\n 'XMOD_PRETRAINED_CONFIG_ARCHIVE_MAP',\r\n 'XmodConfig',\r\n 'XmodOnnxConfig',\r\n ],\r\n}\r\n\r\ntry:\r\n\t\tif not is_torch_available():\r\n\t\t\t\traise OptionalDependencyNotAvailable()\r\nexcept OptionalDependencyNotAvailable:\r\n\t\tpass\r\nelse:\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tstr =[\r\n\t\t 'XMOD_PRETRAINED_MODEL_ARCHIVE_LIST',\r\n\t\t 'XmodForCausalLM',\r\n\t\t 'XmodForMaskedLM',\r\n\t\t 'XmodForMultipleChoice',\r\n\t\t 'XmodForQuestionAnswering',\r\n\t\t 'XmodForSequenceClassification',\r\n\t\t 'XmodForTokenClassification',\r\n\t\t 'XmodModel',\r\n\t\t 'XmodPreTrainedModel',\r\n\t\t]\r\n\r\nif TYPE_CHECKING:\r\n\t\tfrom .configuration_xmod import XMOD_PRETRAINED_CONFIG_ARCHIVE_MAP, XmodConfig, XmodOnnxConfig\r\n\r\n\t\ttry:\r\n\t\t\t\tif not is_torch_available():\r\n\t\t\t\t\t\traise OptionalDependencyNotAvailable()\r\n\t\texcept OptionalDependencyNotAvailable:\r\n\t\t\t\tpass\r\n\t\telse:\r\n\t\t\t\tfrom .modeling_xmod import (\r\n\t\t\t\t XMOD_PRETRAINED_MODEL_ARCHIVE_LIST,\r\n\t\t\t\t XmodForCausalLM,\r\n\t\t\t\t XmodForMaskedLM,\r\n\t\t\t\t XmodForMultipleChoice,\r\n\t\t\t\t XmodForQuestionAnswering,\r\n\t\t\t\t 
XmodForSequenceClassification,\r\n\t\t\t\t XmodForTokenClassification,\r\n\t\t\t\t XmodModel,\r\n\t\t\t\t XmodPreTrainedModel,\r\n\t\t\t\t)\r\n\r\nelse:\r\n\t\timport sys\r\n\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tUnion[str, Any] =_LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)\r\n\r\n"},"code_codestyle":{"kind":"number","value":705,"string":"705"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\nimport os\r\nimport string\r\nimport sys\r\n\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[int] =1 << 8\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tList[Any] ={\r\n 'tab': ord('\\t'),\r\n 'newline': ord('\\r'),\r\n 'esc': 27,\r\n 'up': 65 + ARROW_KEY_FLAG,\r\n 'down': 66 + ARROW_KEY_FLAG,\r\n 'right': 67 + ARROW_KEY_FLAG,\r\n 'left': 68 + ARROW_KEY_FLAG,\r\n 'mod_int': 91,\r\n 'undefined': sys.maxsize,\r\n 'interrupt': 3,\r\n 'insert': 50,\r\n 'delete': 51,\r\n 'pg_up': 53,\r\n 'pg_down': 54,\r\n}\r\n\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =KEYMAP['up']\r\nlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tTuple =KEYMAP['left']\r\n\r\nif sys.platform == \"win32\":\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tDict =[]\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tint ={\r\n\t\t b'\\xe0H': KEYMAP['up'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00H': KEYMAP['up'] - ARROW_KEY_FLAG,\r\n\t\t b'\\xe0P': KEYMAP['down'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00P': KEYMAP['down'] - ARROW_KEY_FLAG,\r\n\t\t b'\\xe0M': KEYMAP['right'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00M': KEYMAP['right'] - ARROW_KEY_FLAG,\r\n\t\t b'\\xe0K': KEYMAP['left'] - ARROW_KEY_FLAG,\r\n\t\t b'\\x00K': KEYMAP['left'] - ARROW_KEY_FLAG,\r\n\t\t}\r\n\r\nfor i in range(10):\r\n\t\tlowerCAmelCase\t\t\t\t:\t\t\t\t\t\t\tOptional[Any] =ord(str(i))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif os.name == \"nt\":\r\n\t\t\t\t\t\t\t\t\t\t\t\timport 
msvcrt\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t\"\"\"mbcs\"\"\"\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Flush the keyboard buffer\r\n\t\t\t\t\t\t\t\t\t\t\t\twhile msvcrt.kbhit():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tmsvcrt.getch()\r\n\t\t\t\t\t\t\t\t\t\t\t\tif len(SCREAMING_SNAKE_CASE__ ) == 0:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Read the keystroke\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tmsvcrt.getch()\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# If it is a prefix char, get second part\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif ch in (b\"\\x00\", b\"\\xe0\"):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tch + msvcrt.getch()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t# Translate actual Win chars to bullet char types\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tchr(WIN_KEYMAP[cha] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tWIN_CH_BUFFER.append(chr(KEYMAP[\"\"\"mod_int\"\"\"] ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tWIN_CH_BUFFER.append(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) in (\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"insert\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"delete\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"pg_up\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t KEYMAP[\"pg_down\"] - 1 << 9,\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tWIN_CH_BUFFER.append(chr(1_2_6 ) )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t 
=\t\t\t\tchr(KEYMAP[\"\"\"esc\"\"\"] )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texcept KeyError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tcha[1]\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tch.decode(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tWIN_CH_BUFFER.pop(0 )\r\n\t\t\t\t\t\telif os.name == \"posix\":\r\n\t\t\t\t\t\t\t\t\t\t\t\timport termios\r\n\t\t\t\t\t\t\t\t\t\t\t\timport tty\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tsys.stdin.fileno()\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\ttermios.tcgetattr(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttty.setraw(SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tsys.stdin.read(1 )\r\n\t\t\t\t\t\t\t\t\t\t\t\tfinally:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\ttermios.tcsetattr(SCREAMING_SNAKE_CASE__ ,termios.TCSADRAIN ,SCREAMING_SNAKE_CASE__ )\r\n\t\t\t\t\t\treturn ch\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tget_raw_chars()\r\n\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) in [KEYMAP[\"interrupt\"], KEYMAP[\"newline\"]]:\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn char\r\n\r\n\t\t\t\t\t\telif ord(SCREAMING_SNAKE_CASE__ ) == KEYMAP[\"esc\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tget_raw_chars()\r\n\t\t\t\t\t\t\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) == KEYMAP[\"mod_int\"]:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tget_raw_chars()\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tif ord(SCREAMING_SNAKE_CASE__ ) >= 
KEYMAP[\"arrow_begin\"] - ARROW_KEY_FLAG and ord(SCREAMING_SNAKE_CASE__ ) <= KEYMAP[\"arrow_end\"] - ARROW_KEY_FLAG:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn chr(ord(SCREAMING_SNAKE_CASE__ ) + ARROW_KEY_FLAG )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn KEYMAP[\"undefined\"]\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn get_raw_chars()\r\n\r\n\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\tif char in string.printable:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn char\r\n\t\t\t\t\t\t\t\t\t\t\t\telse:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn KEYMAP[\"undefined\"]\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}},{"rowIdx":305199,"cells":{"code":{"kind":"string","value":"\r\n\r\n\r\n\r\n# This code is adapted from OpenAI's release\r\n# https://github.com/openai/human-eval/blob/master/human_eval/execution.py\r\n\r\nimport contextlib\r\nimport faulthandler\r\nimport io\r\nimport multiprocessing\r\nimport os\r\nimport platform\r\nimport signal\r\nimport tempfile\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tmultiprocessing.Manager()\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmanager.list()\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tmultiprocessing.Process(target=lowerCAmelCase__ , args=(check_program, result, timeout) )\r\n\t\t\t\t\t\tp.start()\r\n\t\t\t\t\t\tp.join(timeout=timeout + 1 )\r\n\t\t\t\t\t\tif p.is_alive():\r\n\t\t\t\t\t\t\t\t\t\t\t\tp.kill()\r\n\r\n\t\t\t\t\t\tif not result:\r\n\t\t\t\t\t\t\t\t\t\t\t\tresult.append(\"\"\"timed out\"\"\" )\r\n\r\n\t\t\t\t\t\treturn {\r\n\t\t\t\t\t\t 
\"task_id\": task_id,\r\n\t\t\t\t\t\t \"passed\": result[0] == \"passed\",\r\n\t\t\t\t\t\t \"result\": result[0],\r\n\t\t\t\t\t\t \"completion_id\": completion_id,\r\n\t\t\t\t\t\t}\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith create_tempdir():\r\n\t\t\t\t\t\t\t\t\t\t\t\t# These system calls are needed when cleaning up tempdir.\r\n\t\t\t\t\t\t\t\t\t\t\t\timport os\r\n\t\t\t\t\t\t\t\t\t\t\t\timport shutil\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tshutil.rmtree\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tos.rmdir\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tos.chdir\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Disable functionalities that can make destructive changes to the test.\r\n\t\t\t\t\t\t\t\t\t\t\t\treliability_guard()\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Run program.\r\n\t\t\t\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\t{}\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith swallow_io():\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith time_limit(lowerCAmelCase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\texec(lowerCAmelCase__ , lowerCAmelCase__ )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tresult.append(\"\"\"passed\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\texcept TimeoutException:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tresult.append(\"\"\"timed out\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\texcept BaseException as e:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tresult.append(F\"\"\"failed: {e}\"\"\" )\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\t# Needed for cleaning up.\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\trmtree\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\trmdir\r\n\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Union[str, 
Any]\t\t\t\t =\t\t\t\tchdir\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@contextlib.contextmanager\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tdef signal_handler(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\traise TimeoutException(\"\"\"Timed out!\"\"\" )\r\n\r\n\t\t\t\t\t\tsignal.setitimer(signal.ITIMER_REAL , lowerCAmelCase__ )\r\n\t\t\t\t\t\tsignal.signal(signal.SIGALRM , lowerCAmelCase__ )\r\n\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\tyield\r\n\t\t\t\t\t\tfinally:\r\n\t\t\t\t\t\t\t\t\t\t\t\tsignal.setitimer(signal.ITIMER_REAL , 0 )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@contextlib.contextmanager\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tWriteOnlyStringIO()\r\n\t\t\t\t\t\twith contextlib.redirect_stdout(lowerCAmelCase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\twith contextlib.redirect_stderr(lowerCAmelCase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\twith redirect_stdin(lowerCAmelCase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tyield\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@contextlib.contextmanager\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\t):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\twith tempfile.TemporaryDirectory() as dirname:\r\n\t\t\t\t\t\t\t\t\t\t\t\twith chdir(lowerCAmelCase__ ):\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tyield dirname\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\t__a ):\r\n\t\t\tpass\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tio.StringIO ):\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, *lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\traise OSError\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, *lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\traise 
OSError\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, *lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\traise OSError\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, *lowercase_\t\t\t\t\t\t, **lowercase_ ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn False\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a (\t\t\tcontextlib._RedirectStream ): # type: ignore\r\n\t\t\t_UpperCamelCase: List[Any] =\t\t'''stdin'''\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\n@contextlib.contextmanager\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__ ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif root == \".\":\r\n\t\t\t\t\t\t\t\t\t\t\t\tyield\r\n\t\t\t\t\t\t\t\t\t\t\t\treturn\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tos.getcwd()\r\n\t\t\t\t\t\tos.chdir(lowerCAmelCase__ )\r\n\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\tyield\r\n\t\t\t\t\t\texcept BaseException as exc:\r\n\t\t\t\t\t\t\t\t\t\t\t\traise exc\r\n\t\t\t\t\t\tfinally:\r\n\t\t\t\t\t\t\t\t\t\t\t\tos.chdir(lowerCAmelCase__ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\r\ndef _UpperCAmelCase (\t\t\t\t\t\t\tSCREAMING_SNAKE_CASE__=None ):\r\n\r\n\r\n\t\t\t\t\t\t'''simple docstring'''\r\n\r\n\r\n\r\n\r\n\t\t\t\t\t\tif maximum_memory_bytes is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\timport resource\r\n\r\n\t\t\t\t\t\t\t\t\t\t\t\tresource.setrlimit(resource.RLIMIT_AS , (maximum_memory_bytes, maximum_memory_bytes) )\r\n\t\t\t\t\t\t\t\t\t\t\t\tresource.setrlimit(resource.RLIMIT_DATA , (maximum_memory_bytes, maximum_memory_bytes) )\r\n\t\t\t\t\t\t\t\t\t\t\t\tif not platform.uname().system == \"Darwin\":\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tresource.setrlimit(resource.RLIMIT_STACK , (maximum_memory_bytes, maximum_memory_bytes) )\r\n\r\n\t\t\t\t\t\tfaulthandler.disable()\r\n\r\n\t\t\t\t\t\timport builtins\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t 
=\t\t\t\tNone\r\n\r\n\t\t\t\t\t\timport os\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\t\"\"\"1\"\"\"\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tNone\r\n\r\n\t\t\t\t\t\timport shutil\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Union[str, 
Any]\t\t\t\t =\t\t\t\tNone\r\n\r\n\t\t\t\t\t\timport subprocess\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : List[str]\t\t\t\t =\t\t\t\tNone # type: ignore\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tNone\r\n\r\n\t\t\t\t\t\timport sys\r\n\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[Any]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tNone\r\n\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tNone\r\n\r\n"},"code_codestyle":{"kind":"number","value":706,"string":"706"},"style_context":{"kind":"string","value":"\r\n\r\n\r\n\r\n# Imports\r\nimport numpy as np\r\n\r\n\r\n\r\n\r\n\r\n\r\nclass _a :\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef __init__(\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\tself.set_matricies(red=lowercase_\t\t\t\t\t\t, green=lowercase_\t\t\t\t\t\t, blue=lowercase_\t\t\t\t\t\t, red_edge=lowercase_\t\t\t\t\t\t, nir=lowercase_ )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\tif red is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : str\t\t\t\t =\t\t\t\tred\r\n\t\t\t\t\t\t\t\t\tif green is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tgreen\r\n\t\t\t\t\t\t\t\t\tif blue is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Optional[int]\t\t\t\t =\t\t\t\tblue\r\n\t\t\t\t\t\t\t\t\tif red_edge is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\tred_edge\r\n\t\t\t\t\t\t\t\t\tif nir is not None:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tlowerCAmelCase : 
Union[str, Any]\t\t\t\t =\t\t\t\tnir\r\n\t\t\t\t\t\t\t\t\treturn True\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=\"\"\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tself.set_matricies(red=lowercase_\t\t\t\t\t\t, green=lowercase_\t\t\t\t\t\t, blue=lowercase_\t\t\t\t\t\t, red_edge=lowercase_\t\t\t\t\t\t, nir=lowercase_ )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : int\t\t\t\t =\t\t\t\t{\r\n\t\t\t\t\t\t\t\t\t \"\"\"ARVI2\"\"\": self.arvaa,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CCCI\"\"\": self.ccci,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CVI\"\"\": self.cvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GLI\"\"\": self.gli,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NDVI\"\"\": self.ndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"BNDVI\"\"\": self.bndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"redEdgeNDVI\"\"\": self.red_edge_ndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GNDVI\"\"\": self.gndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GBNDVI\"\"\": self.gbndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GRNDVI\"\"\": self.grndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"RBNDVI\"\"\": self.rbndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"PNDVI\"\"\": self.pndvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"ATSAVI\"\"\": self.atsavi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"BWDRVI\"\"\": self.bwdrvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CIgreen\"\"\": self.ci_green,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CIrededge\"\"\": self.ci_rededge,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CI\"\"\": self.ci,\r\n\t\t\t\t\t\t\t\t\t \"\"\"CTVI\"\"\": self.ctvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GDVI\"\"\": self.gdvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"EVI\"\"\": self.evi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GEMI\"\"\": self.gemi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GOSAVI\"\"\": self.gosavi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"GSAVI\"\"\": self.gsavi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"Hue\"\"\": self.hue,\r\n\t\t\t\t\t\t\t\t\t \"\"\"IVI\"\"\": self.ivi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"IPVI\"\"\": self.ipvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"I\"\"\": 
self.i,\r\n\t\t\t\t\t\t\t\t\t \"\"\"RVI\"\"\": self.rvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"MRVI\"\"\": self.mrvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"MSAVI\"\"\": self.m_savi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NormG\"\"\": self.norm_g,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NormNIR\"\"\": self.norm_nir,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NormR\"\"\": self.norm_r,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NGRDI\"\"\": self.ngrdi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"RI\"\"\": self.ri,\r\n\t\t\t\t\t\t\t\t\t \"\"\"S\"\"\": self.s,\r\n\t\t\t\t\t\t\t\t\t \"\"\"IF\"\"\": self._if,\r\n\t\t\t\t\t\t\t\t\t \"\"\"DVI\"\"\": self.dvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"TVI\"\"\": self.tvi,\r\n\t\t\t\t\t\t\t\t\t \"\"\"NDRE\"\"\": self.ndre,\r\n\t\t\t\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t\t\t\ttry:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn funcs[index]()\r\n\t\t\t\t\t\t\t\t\texcept KeyError:\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\tprint(\"\"\"Index not in the list!\"\"\" )\r\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\treturn False\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn -0.1_8 + (1.1_7 * ((self.nir - self.red) / (self.nir + self.red)))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn ((self.nir - self.redEdge) / (self.nir + self.redEdge)) / (\r\n\t\t\t\t\t\t\t\t\t (self.nir - self.red) / (self.nir + self.red)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir * (self.red / (self.green**2))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (2 * self.green - self.red - self.blue) / (\r\n\t\t\t\t\t\t\t\t\t 2 * self.green + self.red + self.blue\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.red) / (self.nir + self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef 
_snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.blue) / (self.nir + self.blue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (self.redEdge - self.red) / (self.redEdge + self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.green) / (self.nir + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.green + self.blue)) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + (self.green + self.blue)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.green + self.red)) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + (self.green + self.red)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.blue + self.red)) / (self.nir + (self.blue + self.red))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - (self.green + self.red + self.blue)) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + (self.green + self.red + self.blue)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0.0_8\t\t\t\t\t\t, lowercase_=1.2_2\t\t\t\t\t\t, lowercase_=0.0_3 ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn a * (\r\n\t\t\t\t\t\t\t\t\t (self.nir - a * self.red - b)\r\n\t\t\t\t\t\t\t\t\t / (a * self.nir + self.red - a * b + x * (1 + a**2))\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (0.1 * self.nir - self.blue) / (0.1 * self.nir + self.blue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) 
->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir / self.green) - 1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir / self.redEdge) - 1\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn (self.red - self.blue) / self.red\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tself.ndvi()\r\n\t\t\t\t\t\t\t\t\treturn ((ndvi + 0.5) / (abs(ndvi + 0.5 ))) * (abs(ndvi + 0.5 ) ** (1 / 2))\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir - self.green\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn 2.5 * (\r\n\t\t\t\t\t\t\t\t\t (self.nir - self.red) / (self.nir + 6 * self.red - 7.5 * self.blue + 1)\r\n\t\t\t\t\t\t\t\t\t)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[Any]:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Tuple\t\t\t\t =\t\t\t\t(2 * (self.nir**2 - self.red**2) + 1.5 * self.nir + 0.5 * self.red) / (\r\n\t\t\t\t\t\t\t\t\t self.nir + self.red + 0.5\r\n\t\t\t\t\t\t\t\t\t)\r\n\t\t\t\t\t\t\t\t\treturn n * (1 - 0.2_5 * n) - (self.red - 0.1_2_5) / (1 - self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0.1_6 ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.green) / (self.nir + self.green + y)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=0.5 ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn ((self.nir - self.green) / (self.nir + self.green + n)) * (1 + n)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn np.arctan(\r\n\t\t\t\t\t\t\t\t\t ((2 * self.red - self.green - self.blue) / 
3_0.5) * (self.green - self.blue) )\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself\t\t\t\t\t\t, lowercase_=None\t\t\t\t\t\t, lowercase_=None ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - b) / (a * self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tAny:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir / ((self.nir + self.red) / 2)) * (self.ndvi() + 1)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn (self.red + self.green + self.blue) / 3_0.5\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tUnion[str, Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir / self.red\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\treturn (self.rvi() - 1) / (self.rvi() + 1)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn (\r\n\t\t\t\t\t\t\t\t\t (2 * self.nir + 1)\r\n\t\t\t\t\t\t\t\t\t - ((2 * self.nir + 1) ** 2 - 8 * (self.nir - self.red)) ** (1 / 2)\r\n\t\t\t\t\t\t\t\t\t) / 2\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn self.green / (self.nir + self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn self.nir / (self.nir + self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tDict:\r\n\t\t\t\t\t\t\t\t\treturn self.red / (self.nir + self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[Any]:\r\n\t\t\t\t\t\t\t\t\treturn (self.green - self.red) / (self.green + self.red)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tOptional[int]:\r\n\t\t\t\t\t\t\t\t\treturn (self.red - self.green) / (self.red + self.green)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef 
_snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tTuple:\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Any\t\t\t\t =\t\t\t\tnp.max([np.max(self.red ), np.max(self.green ), np.max(self.blue )] )\r\n\t\t\t\t\t\t\t\t\tlowerCAmelCase : Dict\t\t\t\t =\t\t\t\tnp.min([np.min(self.red ), np.min(self.green ), np.min(self.blue )] )\r\n\t\t\t\t\t\t\t\t\treturn (max_value - min_value) / max_value\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (2 * self.red - self.green - self.blue) / (self.green - self.blue)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tList[str]:\r\n\t\t\t\t\t\t\t\t\treturn self.nir / self.red\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tint:\r\n\t\t\t\t\t\t\t\t\treturn (self.ndvi() + 0.5) ** (1 / 2)\r\n\r\n\r\n\r\n\r\n\r\n\r\n\t\t\tdef _snake_case (\t\t\t\t\t\tself ) ->\t\t\t\tstr:\r\n\t\t\t\t\t\t\t\t\treturn (self.nir - self.redEdge) / (self.nir + self.redEdge)\r\n\r\n"},"style_context_codestyle":{"kind":"number","value":693,"string":"693"},"label":{"kind":"number","value":0,"string":"0"}}}],"truncated":false,"partial":false},"paginationData":{"pageIndex":3051,"numItemsPerPage":100,"numTotalItems":307987,"offset":305100,"length":100}},"jwt":"eyJhbGciOiJFZERTQSJ9.eyJyZWFkIjp0cnVlLCJwZXJtaXNzaW9ucyI6eyJyZXBvLmNvbnRlbnQucmVhZCI6dHJ1ZX0sImlhdCI6MTc1NjQzMzEyNiwic3ViIjoiL2RhdGFzZXRzL2luZmluaXR5b2ZzcGFjZS9weXRob25fY29kZXN0eWxlcy1zaW5nbGUtMWsiLCJleHAiOjE3NTY0MzY3MjYsImlzcyI6Imh0dHBzOi8vaHVnZ2luZ2ZhY2UuY28ifQ.6o2dMdI5mL_EinVQCFdoM8My64U14RKVdFs8se6UYeasohgFzXMEeizII7fpHnAzItjOcGhezslANyDzmbH5CA","displayUrls":true},"discussionsStats":{"closed":0,"open":0,"total":0},"fullWidth":true,"hasGatedAccess":true,"hasFullAccess":true,"isEmbedded":false,"savedQueries":{"community":[],"user":[]}}">

code
stringlengths
81
54k
code_codestyle
int64
0
721
style_context
stringlengths
91
41.9k
style_context_codestyle
int64
0
699
label
int64
0
1
import sys import tempfile import unittest import unittest.mock as mock from pathlib import Path from huggingface_hub import HfFolder, delete_repo from requests.exceptions import HTTPError from transformers import AutoFeatureExtractor, WavaVecaFeatureExtractor from transformers.testing_utils import TOKEN, USER, get_tests_dir, is_staging_test sys.path.append(str(Path(__file__).parent.parent / 'utils')) from test_module.custom_feature_extraction import CustomFeatureExtractor # noqa E402 lowerCAmelCase : List[Any] =get_tests_dir('fixtures') class _a ( unittest.TestCase ): def _snake_case ( self ) -> List[Any]: # A mock response for an HTTP head request to emulate server down lowerCAmelCase : Tuple = mock.Mock() lowerCAmelCase : Optional[Any] = 500 lowerCAmelCase : Dict = {} lowerCAmelCase : Union[str, Any] = HTTPError lowerCAmelCase : int = {} # Download this model to make sure it's in the cache. lowerCAmelCase : Optional[Any] = WavaVecaFeatureExtractor.from_pretrained("""hf-internal-testing/tiny-random-wav2vec2""" ) # Under the mock environment we get a 500 error when trying to reach the model. 
with mock.patch("""requests.Session.request""" , return_value=lowercase_ ) as mock_head: lowerCAmelCase : Union[str, Any] = WavaVecaFeatureExtractor.from_pretrained("""hf-internal-testing/tiny-random-wav2vec2""" ) # This check we did call the fake head request mock_head.assert_called() def _snake_case ( self ) -> Optional[Any]: # This test is for deprecated behavior and can be removed in v5 lowerCAmelCase : Tuple = WavaVecaFeatureExtractor.from_pretrained( """https://huggingface.co/hf-internal-testing/tiny-random-wav2vec2/resolve/main/preprocessor_config.json""" ) @is_staging_test class _a ( unittest.TestCase ): @classmethod def _snake_case ( cls ) -> Dict: lowerCAmelCase : Union[str, Any] = TOKEN HfFolder.save_token(lowercase_ ) @classmethod def _snake_case ( cls ) -> Optional[int]: try: delete_repo(token=cls._token , repo_id="""test-feature-extractor""" ) except HTTPError: pass try: delete_repo(token=cls._token , repo_id="""valid_org/test-feature-extractor-org""" ) except HTTPError: pass try: delete_repo(token=cls._token , repo_id="""test-dynamic-feature-extractor""" ) except HTTPError: pass def _snake_case ( self ) -> List[str]: lowerCAmelCase : Optional[int] = WavaVecaFeatureExtractor.from_pretrained(lowercase_ ) feature_extractor.push_to_hub("""test-feature-extractor""" , use_auth_token=self._token ) lowerCAmelCase : List[str] = WavaVecaFeatureExtractor.from_pretrained(f"""{USER}/test-feature-extractor""" ) for k, v in feature_extractor.__dict__.items(): self.assertEqual(lowercase_ , getattr(lowercase_ , lowercase_ ) ) # Reset repo delete_repo(token=self._token , repo_id="""test-feature-extractor""" ) # Push to hub via save_pretrained with tempfile.TemporaryDirectory() as tmp_dir: feature_extractor.save_pretrained( lowercase_ , repo_id="""test-feature-extractor""" , push_to_hub=lowercase_ , use_auth_token=self._token ) lowerCAmelCase : List[Any] = WavaVecaFeatureExtractor.from_pretrained(f"""{USER}/test-feature-extractor""" ) for k, v in 
feature_extractor.__dict__.items(): self.assertEqual(lowercase_ , getattr(lowercase_ , lowercase_ ) ) def _snake_case ( self ) -> Dict: lowerCAmelCase : Optional[Any] = WavaVecaFeatureExtractor.from_pretrained(lowercase_ ) feature_extractor.push_to_hub("""valid_org/test-feature-extractor""" , use_auth_token=self._token ) lowerCAmelCase : int = WavaVecaFeatureExtractor.from_pretrained("""valid_org/test-feature-extractor""" ) for k, v in feature_extractor.__dict__.items(): self.assertEqual(lowercase_ , getattr(lowercase_ , lowercase_ ) ) # Reset repo delete_repo(token=self._token , repo_id="""valid_org/test-feature-extractor""" ) # Push to hub via save_pretrained with tempfile.TemporaryDirectory() as tmp_dir: feature_extractor.save_pretrained( lowercase_ , repo_id="""valid_org/test-feature-extractor-org""" , push_to_hub=lowercase_ , use_auth_token=self._token ) lowerCAmelCase : Tuple = WavaVecaFeatureExtractor.from_pretrained("""valid_org/test-feature-extractor-org""" ) for k, v in feature_extractor.__dict__.items(): self.assertEqual(lowercase_ , getattr(lowercase_ , lowercase_ ) ) def _snake_case ( self ) -> Tuple: CustomFeatureExtractor.register_for_auto_class() lowerCAmelCase : Dict = CustomFeatureExtractor.from_pretrained(lowercase_ ) feature_extractor.push_to_hub("""test-dynamic-feature-extractor""" , use_auth_token=self._token ) # This has added the proper auto_map field to the config self.assertDictEqual( feature_extractor.auto_map , {"""AutoFeatureExtractor""": """custom_feature_extraction.CustomFeatureExtractor"""} , ) lowerCAmelCase : Optional[int] = AutoFeatureExtractor.from_pretrained( f"""{USER}/test-dynamic-feature-extractor""" , trust_remote_code=lowercase_ ) # Can't make an isinstance check because the new_feature_extractor is from the CustomFeatureExtractor class of a dynamic module self.assertEqual(new_feature_extractor.__class__.__name__ , """CustomFeatureExtractor""" )
693
from ..utils import DummyObject, requires_backends class _a ( metaclass=snake_case_ ): _UpperCamelCase: List[Any] = ["keras_nlp"] def __init__( self , *lowercase_ , **lowercase_ ) -> Tuple: requires_backends(self , ["""keras_nlp"""] )
693
1
# flake8: noqa # Lint as: python3 lowerCAmelCase : List[Any] =[ 'VerificationMode', 'Version', 'disable_progress_bar', 'enable_progress_bar', 'is_progress_bar_enabled', 'experimental', ] from .info_utils import VerificationMode from .logging import disable_progress_bar, enable_progress_bar, is_progress_bar_enabled from .version import Version from .experimental import experimental
693
from ...utils import ( OptionalDependencyNotAvailable, is_torch_available, is_transformers_available, is_transformers_version, ) try: if not (is_transformers_available() and is_torch_available() and is_transformers_version('>=', '4.25.0')): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: from ...utils.dummy_torch_and_transformers_objects import UnCLIPImageVariationPipeline, UnCLIPPipeline else: from .pipeline_unclip import UnCLIPPipeline from .pipeline_unclip_image_variation import UnCLIPImageVariationPipeline from .text_proj import UnCLIPTextProjModel
693
1
from typing import TYPE_CHECKING from ...utils import ( OptionalDependencyNotAvailable, _LazyModule, is_flax_available, is_tf_available, is_tokenizers_available, is_torch_available, ) lowerCAmelCase : Union[str, Any] ={ 'configuration_roformer': ['ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'RoFormerConfig', 'RoFormerOnnxConfig'], 'tokenization_roformer': ['RoFormerTokenizer'], } try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : Dict =['RoFormerTokenizerFast'] try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : Optional[int] =[ 'ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST', 'RoFormerForCausalLM', 'RoFormerForMaskedLM', 'RoFormerForMultipleChoice', 'RoFormerForQuestionAnswering', 'RoFormerForSequenceClassification', 'RoFormerForTokenClassification', 'RoFormerLayer', 'RoFormerModel', 'RoFormerPreTrainedModel', 'load_tf_weights_in_roformer', ] try: if not is_tf_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : Tuple =[ 'TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST', 'TFRoFormerForCausalLM', 'TFRoFormerForMaskedLM', 'TFRoFormerForMultipleChoice', 'TFRoFormerForQuestionAnswering', 'TFRoFormerForSequenceClassification', 'TFRoFormerForTokenClassification', 'TFRoFormerLayer', 'TFRoFormerModel', 'TFRoFormerPreTrainedModel', ] try: if not is_flax_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : int =[ 'FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST', 'FlaxRoFormerForMaskedLM', 'FlaxRoFormerForMultipleChoice', 'FlaxRoFormerForQuestionAnswering', 'FlaxRoFormerForSequenceClassification', 'FlaxRoFormerForTokenClassification', 'FlaxRoFormerModel', 'FlaxRoFormerPreTrainedModel', ] if TYPE_CHECKING: from .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, 
RoFormerConfig, RoFormerOnnxConfig from .tokenization_roformer import RoFormerTokenizer try: if not is_tokenizers_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .tokenization_roformer_fast import RoFormerTokenizerFast try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_roformer import ( ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST, RoFormerForCausalLM, RoFormerForMaskedLM, RoFormerForMultipleChoice, RoFormerForQuestionAnswering, RoFormerForSequenceClassification, RoFormerForTokenClassification, RoFormerLayer, RoFormerModel, RoFormerPreTrainedModel, load_tf_weights_in_roformer, ) try: if not is_tf_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_tf_roformer import ( TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST, TFRoFormerForCausalLM, TFRoFormerForMaskedLM, TFRoFormerForMultipleChoice, TFRoFormerForQuestionAnswering, TFRoFormerForSequenceClassification, TFRoFormerForTokenClassification, TFRoFormerLayer, TFRoFormerModel, TFRoFormerPreTrainedModel, ) try: if not is_flax_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_flax_roformer import ( FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST, FlaxRoFormerForMaskedLM, FlaxRoFormerForMultipleChoice, FlaxRoFormerForQuestionAnswering, FlaxRoFormerForSequenceClassification, FlaxRoFormerForTokenClassification, FlaxRoFormerModel, FlaxRoFormerPreTrainedModel, ) else: import sys lowerCAmelCase : Tuple =_LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)
693
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' if p < 2: raise ValueError("""p should not be less than 2!""" ) elif p == 2: return True lowerCAmelCase : List[Any] = 4 lowerCAmelCase : Tuple = (1 << p) - 1 for _ in range(p - 2 ): lowerCAmelCase : Dict = ((s * s) - 2) % m return s == 0 if __name__ == "__main__": print(lucas_lehmer_test(7)) print(lucas_lehmer_test(11))
693
1
import importlib import sys from argparse import REMAINDER, ArgumentParser from pathlib import Path import torch_xla.distributed.xla_multiprocessing as xmp def _UpperCAmelCase ( ): '''simple docstring''' lowerCAmelCase : int = ArgumentParser( description=( """PyTorch TPU distributed training launch helper utility that will spawn up multiple distributed processes""" ) ) # Optional arguments for the launch helper parser.add_argument("""--num_cores""" ,type=SCREAMING_SNAKE_CASE__ ,default=1 ,help="""Number of TPU cores to use (1 or 8).""" ) # positional parser.add_argument( """training_script""" ,type=SCREAMING_SNAKE_CASE__ ,help=( """The full path to the single TPU training """ """program/script to be launched in parallel, """ """followed by all the arguments for the """ """training script""" ) ,) # rest from the training program parser.add_argument("""training_script_args""" ,nargs=SCREAMING_SNAKE_CASE__ ) return parser.parse_args() def _UpperCAmelCase ( ): '''simple docstring''' lowerCAmelCase : str = parse_args() # Import training_script as a module. lowerCAmelCase : Optional[Any] = Path(args.training_script ) sys.path.append(str(script_fpath.parent.resolve() ) ) lowerCAmelCase : Any = script_fpath.stem lowerCAmelCase : List[Any] = importlib.import_module(SCREAMING_SNAKE_CASE__ ) # Patch sys.argv lowerCAmelCase : Optional[Any] = [args.training_script] + args.training_script_args + ["""--tpu_num_cores""", str(args.num_cores )] xmp.spawn(mod._mp_fn ,args=() ,nprocs=args.num_cores ) if __name__ == "__main__": main()
693
import random import unittest import torch from diffusers import IFImgaImgSuperResolutionPipeline from diffusers.utils import floats_tensor from diffusers.utils.import_utils import is_xformers_available from diffusers.utils.testing_utils import skip_mps, torch_device from ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS from ..test_pipelines_common import PipelineTesterMixin from . import IFPipelineTesterMixin @skip_mps class _a ( snake_case_ , snake_case_ , unittest.TestCase ): _UpperCamelCase: Optional[Any] = IFImgaImgSuperResolutionPipeline _UpperCamelCase: int = TEXT_GUIDED_IMAGE_VARIATION_PARAMS - {"width", "height"} _UpperCamelCase: Optional[int] = TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS.union({"original_image"} ) _UpperCamelCase: List[str] = PipelineTesterMixin.required_optional_params - {"latents"} def _snake_case ( self ) -> int: return self._get_superresolution_dummy_components() def _snake_case ( self , lowercase_ , lowercase_=0 ) -> Optional[Any]: if str(lowercase_ ).startswith("""mps""" ): lowerCAmelCase : Any = torch.manual_seed(lowercase_ ) else: lowerCAmelCase : List[Any] = torch.Generator(device=lowercase_ ).manual_seed(lowercase_ ) lowerCAmelCase : Union[str, Any] = floats_tensor((1, 3, 32, 32) , rng=random.Random(lowercase_ ) ).to(lowercase_ ) lowerCAmelCase : Optional[Any] = floats_tensor((1, 3, 16, 16) , rng=random.Random(lowercase_ ) ).to(lowercase_ ) lowerCAmelCase : Optional[int] = { """prompt""": """A painting of a squirrel eating a burger""", """image""": image, """original_image""": original_image, """generator""": generator, """num_inference_steps""": 2, """output_type""": """numpy""", } return inputs @unittest.skipIf( torch_device != """cuda""" or not is_xformers_available() , reason="""XFormers attention is only available with CUDA and `xformers` installed""" , ) def _snake_case ( self ) -> Optional[int]: self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3 ) 
def _snake_case ( self ) -> int: self._test_save_load_optional_components() @unittest.skipIf(torch_device != """cuda""" , reason="""float16 requires CUDA""" ) def _snake_case ( self ) -> Any: # Due to non-determinism in save load of the hf-internal-testing/tiny-random-t5 text encoder super().test_save_load_floataa(expected_max_diff=1e-1 ) def _snake_case ( self ) -> int: self._test_attention_slicing_forward_pass(expected_max_diff=1e-2 ) def _snake_case ( self ) -> Any: self._test_save_load_local() def _snake_case ( self ) -> str: self._test_inference_batch_single_identical( expected_max_diff=1e-2 , )
693
1
print((lambda quine: quine % quine)('print((lambda quine: quine %% quine)(%r))'))
693
from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase : Optional[int] =logging.get_logger(__name__) lowerCAmelCase : List[Any] ={} class _a ( snake_case_ ): _UpperCamelCase: Tuple = "llama" _UpperCamelCase: List[str] = ["past_key_values"] def __init__( self , lowercase_=32000 , lowercase_=4096 , lowercase_=11008 , lowercase_=32 , lowercase_=32 , lowercase_=None , lowercase_="silu" , lowercase_=2048 , lowercase_=0.0_2 , lowercase_=1e-6 , lowercase_=True , lowercase_=0 , lowercase_=1 , lowercase_=2 , lowercase_=1 , lowercase_=False , lowercase_=None , **lowercase_ , ) -> Optional[int]: lowerCAmelCase : List[str] = vocab_size lowerCAmelCase : str = max_position_embeddings lowerCAmelCase : int = hidden_size lowerCAmelCase : Optional[int] = intermediate_size lowerCAmelCase : int = num_hidden_layers lowerCAmelCase : Any = num_attention_heads # for backward compatibility if num_key_value_heads is None: lowerCAmelCase : Any = num_attention_heads lowerCAmelCase : Any = num_key_value_heads lowerCAmelCase : Any = hidden_act lowerCAmelCase : Union[str, Any] = initializer_range lowerCAmelCase : str = rms_norm_eps lowerCAmelCase : int = pretraining_tp lowerCAmelCase : int = use_cache lowerCAmelCase : Optional[Any] = rope_scaling self._rope_scaling_validation() super().__init__( pad_token_id=lowercase_ , bos_token_id=lowercase_ , eos_token_id=lowercase_ , tie_word_embeddings=lowercase_ , **lowercase_ , ) def _snake_case ( self ) -> Dict: if self.rope_scaling is None: return if not isinstance(self.rope_scaling , lowercase_ ) or len(self.rope_scaling ) != 2: raise ValueError( """`rope_scaling` must be a dictionary with with two fields, `name` and `factor`, """ f"""got {self.rope_scaling}""" ) lowerCAmelCase : Union[str, Any] = self.rope_scaling.get("""type""" , lowercase_ ) lowerCAmelCase : Dict = self.rope_scaling.get("""factor""" , lowercase_ ) if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic"]: raise 
ValueError( f"""`rope_scaling`'s name field must be one of ['linear', 'dynamic'], got {rope_scaling_type}""" ) if rope_scaling_factor is None or not isinstance(lowercase_ , lowercase_ ) or rope_scaling_factor <= 1.0: raise ValueError(f"""`rope_scaling`'s factor field must be an float > 1, got {rope_scaling_factor}""" )
693
1
from typing import List, Optional, Union

import numpy as np
import PIL
import torch
from PIL import Image

from ...models import UNet2DConditionModel, VQModel
from ...pipelines import DiffusionPipeline
from ...pipelines.pipeline_utils import ImagePipelineOutput
from ...schedulers import DDPMScheduler
from ...utils import (
    is_accelerate_available,
    is_accelerate_version,
    logging,
    randn_tensor,
    replace_example_docstring,
)


logger = logging.get_logger(__name__)  # pylint: disable=invalid-name

EXAMPLE_DOC_STRING = """
    Examples:
        ```py
        >>> from diffusers import KandinskyV22Img2ImgPipeline, KandinskyV22PriorPipeline
        >>> from diffusers.utils import load_image
        >>> import torch

        >>> pipe_prior = KandinskyV22PriorPipeline.from_pretrained(
        ...     "kandinsky-community/kandinsky-2-2-prior", torch_dtype=torch.float16
        ... )
        >>> pipe_prior.to("cuda")

        >>> prompt = "A red cartoon frog, 4k"
        >>> image_emb, zero_image_emb = pipe_prior(prompt, return_dict=False)

        >>> pipe = KandinskyV22Img2ImgPipeline.from_pretrained(
        ...     "kandinsky-community/kandinsky-2-2-decoder", torch_dtype=torch.float16
        ... )
        >>> pipe.to("cuda")

        >>> init_image = load_image(
        ...     "https://huggingface.co/datasets/hf-internal-testing/diffusers-images/resolve/main"
        ...     "/kandinsky/frog.png"
        ... )

        >>> image = pipe(
        ...     image=init_image,
        ...     image_embeds=image_emb,
        ...     negative_image_embeds=zero_image_emb,
        ...     height=768,
        ...     width=768,
        ...     num_inference_steps=100,
        ...     strength=0.2,
        ... ).images

        >>> image[0].save("red_frog.png")
        ```
"""


def downscale_height_and_width(height, width, scale_factor=8):
    """Round (height, width) up to the nearest multiple of scale_factor**2,
    then divide by scale_factor — the latent resolution used by the movq VAE."""
    new_height = height // scale_factor**2
    if height % scale_factor**2 != 0:
        new_height += 1
    new_width = width // scale_factor**2
    if width % scale_factor**2 != 0:
        new_width += 1
    return new_height * scale_factor, new_width * scale_factor


def prepare_image(pil_image, w=512, h=512):
    """Resize a PIL image to (w, h) and convert to a [-1, 1] float tensor of
    shape (1, 3, h, w)."""
    pil_image = pil_image.resize((w, h), resample=Image.BICUBIC, reducing_gap=1)
    arr = np.array(pil_image.convert("RGB"))
    arr = arr.astype(np.float32) / 127.5 - 1
    arr = np.transpose(arr, [2, 0, 1])
    image = torch.from_numpy(arr).unsqueeze(0)
    return image


class KandinskyV22Img2ImgPipeline(DiffusionPipeline):
    """
    Image-to-image pipeline for Kandinsky 2.2.

    Args:
        unet: conditional U-Net denoiser.
        scheduler: DDPM-style noise scheduler.
        movq: MoVQ image encoder/decoder (VAE).
    """

    def __init__(self, unet, scheduler, movq):
        super().__init__()
        self.register_modules(unet=unet, scheduler=scheduler, movq=movq)
        # Spatial downscaling of the movq latent space relative to pixel space.
        self.movq_scale_factor = 2 ** (len(self.movq.config.block_out_channels) - 1)

    def get_timesteps(self, num_inference_steps, strength, device):
        # get the original timestep using init_timestep; `strength` controls how
        # much of the schedule is actually run (img2img keeps early structure).
        init_timestep = min(int(num_inference_steps * strength), num_inference_steps)
        t_start = max(num_inference_steps - init_timestep, 0)
        timesteps = self.scheduler.timesteps[t_start:]
        return timesteps, num_inference_steps - t_start

    def prepare_latents(self, image, timestep, batch_size, num_images_per_prompt, dtype, device, generator=None):
        """Encode `image` into movq latents (unless already 4-channel latents)
        and noise them to `timestep` for img2img."""
        if not isinstance(image, (torch.Tensor, PIL.Image.Image, list)):
            raise ValueError(
                f"`image` has to be of type `torch.Tensor`, `PIL.Image.Image` or list but is {type(image)}"
            )

        image = image.to(device=device, dtype=dtype)

        batch_size = batch_size * num_images_per_prompt

        if image.shape[1] == 4:
            # Input is already latent-shaped; skip encoding.
            init_latents = image
        else:
            if isinstance(generator, list) and len(generator) != batch_size:
                raise ValueError(
                    f"You have passed a list of generators of length {len(generator)}, but requested an effective batch"
                    f" size of {batch_size}. Make sure the batch size matches the length of the generators."
                )
            elif isinstance(generator, list):
                init_latents = [
                    self.movq.encode(image[i : i + 1]).latent_dist.sample(generator[i]) for i in range(batch_size)
                ]
                init_latents = torch.cat(init_latents, dim=0)
            else:
                init_latents = self.movq.encode(image).latent_dist.sample(generator)

            init_latents = self.movq.config.scaling_factor * init_latents

        init_latents = torch.cat([init_latents], dim=0)

        shape = init_latents.shape
        noise = randn_tensor(shape, generator=generator, device=device, dtype=dtype)

        # get latents
        init_latents = self.scheduler.add_noise(init_latents, noise, timestep)
        latents = init_latents
        return latents

    def enable_sequential_cpu_offload(self, gpu_id=0):
        """Offload unet and movq to CPU via accelerate, moving each to GPU only
        while its forward runs."""
        if is_accelerate_available():
            from accelerate import cpu_offload
        else:
            raise ImportError("Please install accelerate via `pip install accelerate`")

        device = torch.device(f"cuda:{gpu_id}")

        models = [
            self.unet,
            self.movq,
        ]
        for cpu_offloaded_model in models:
            if cpu_offloaded_model is not None:
                cpu_offload(cpu_offloaded_model, device)

    def enable_model_cpu_offload(self, gpu_id=0):
        """Whole-model CPU offload with hooks (requires accelerate >= 0.17)."""
        if is_accelerate_available() and is_accelerate_version(">=", "0.17.0.dev0"):
            from accelerate import cpu_offload_with_hook
        else:
            raise ImportError("`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.")

        device = torch.device(f"cuda:{gpu_id}")

        if self.device.type != "cpu":
            self.to("cpu", silence_dtype_warnings=True)
            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)

        hook = None
        for cpu_offloaded_model in [self.unet, self.movq]:
            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)

        # We'll offload the last model manually.
        self.final_offload_hook = hook

    @property
    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device
    def _execution_device(self):
        """Device the unet actually executes on, accounting for offload hooks."""
        if not hasattr(self.unet, "_hf_hook"):
            return self.device
        for module in self.unet.modules():
            if (
                hasattr(module, "_hf_hook")
                and hasattr(module._hf_hook, "execution_device")
                and module._hf_hook.execution_device is not None
            ):
                return torch.device(module._hf_hook.execution_device)
        return self.device

    @torch.no_grad()
    @replace_example_docstring(EXAMPLE_DOC_STRING)
    def __call__(
        self,
        image_embeds,
        image,
        negative_image_embeds,
        height=512,
        width=512,
        num_inference_steps=100,
        guidance_scale=4.0,
        strength=0.3,
        num_images_per_prompt=1,
        generator=None,
        output_type="pil",
        return_dict=True,
    ):
        """
        Run img2img generation.

        Args:
            image_embeds: CLIP image embeddings from the prior (tensor or list).
            image: init image(s) — PIL image or tensor, or a list of them.
            negative_image_embeds: unconditional embeddings for CFG.
            strength: fraction of the noise schedule applied to the init image.
            output_type: one of "pt", "np", "pil".

        Returns:
            `ImagePipelineOutput` (or a tuple when `return_dict=False`).
        """
        device = self._execution_device

        do_classifier_free_guidance = guidance_scale > 1.0

        if isinstance(image_embeds, list):
            image_embeds = torch.cat(image_embeds, dim=0)
        batch_size = image_embeds.shape[0]
        if isinstance(negative_image_embeds, list):
            negative_image_embeds = torch.cat(negative_image_embeds, dim=0)

        if do_classifier_free_guidance:
            image_embeds = image_embeds.repeat_interleave(num_images_per_prompt, dim=0)
            negative_image_embeds = negative_image_embeds.repeat_interleave(num_images_per_prompt, dim=0)
            # Unconditional first, conditional second — matches the chunk(2) below.
            image_embeds = torch.cat([negative_image_embeds, image_embeds], dim=0).to(
                dtype=self.unet.dtype, device=device
            )

        if not isinstance(image, list):
            image = [image]
        if not all(isinstance(i, (PIL.Image.Image, torch.Tensor)) for i in image):
            raise ValueError(
                f"Input is in incorrect format: {[type(i) for i in image]}. Currently, we only support PIL image and pytorch tensor"
            )

        image = torch.cat([prepare_image(i, width, height) for i in image], dim=0)
        image = image.to(dtype=image_embeds.dtype, device=device)

        latents = self.movq.encode(image)["latents"]
        latents = latents.repeat_interleave(num_images_per_prompt, dim=0)
        self.scheduler.set_timesteps(num_inference_steps, device=device)
        timesteps, num_inference_steps = self.get_timesteps(num_inference_steps, strength, device)
        latent_timestep = timesteps[:1].repeat(batch_size * num_images_per_prompt)
        height, width = downscale_height_and_width(height, width, self.movq_scale_factor)
        latents = self.prepare_latents(
            latents, latent_timestep, batch_size, num_images_per_prompt, image_embeds.dtype, device, generator
        )
        for i, t in enumerate(self.progress_bar(timesteps)):
            # expand the latents if we are doing classifier free guidance
            latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents

            added_cond_kwargs = {"image_embeds": image_embeds}
            noise_pred = self.unet(
                sample=latent_model_input,
                timestep=t,
                encoder_hidden_states=None,
                added_cond_kwargs=added_cond_kwargs,
                return_dict=False,
            )[0]

            if do_classifier_free_guidance:
                # The unet predicts noise and variance stacked along channels.
                noise_pred, variance_pred = noise_pred.split(latents.shape[1], dim=1)
                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
                _, variance_pred_text = variance_pred.chunk(2)
                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)
                noise_pred = torch.cat([noise_pred, variance_pred_text], dim=1)

            if not (
                hasattr(self.scheduler.config, "variance_type")
                and self.scheduler.config.variance_type in ["learned", "learned_range"]
            ):
                noise_pred, _ = noise_pred.split(latents.shape[1], dim=1)

            # compute the previous noisy sample x_t -> x_t-1
            latents = self.scheduler.step(
                noise_pred,
                t,
                latents,
                generator=generator,
            )[0]

        # post-processing
        image = self.movq.decode(latents, force_not_quantize=True)["sample"]

        if output_type not in ["pt", "np", "pil"]:
            raise ValueError(f"Only the output types `pt`, `pil` and `np` are supported not output_type={output_type}")

        if output_type in ["np", "pil"]:
            image = image * 0.5 + 0.5
            image = image.clamp(0, 1)
            image = image.cpu().permute(0, 2, 3, 1).float().numpy()

        if output_type == "pil":
            image = self.numpy_to_pil(image)

        if not return_dict:
            return (image,)

        return ImagePipelineOutput(images=image)
693
from collections import OrderedDict
from typing import Mapping

from packaging import version

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices


logger = logging.get_logger(__name__)

SWIN_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "microsoft/swin-tiny-patch4-window7-224": (
        "https://huggingface.co/microsoft/swin-tiny-patch4-window7-224/resolve/main/config.json"
    ),
    # See all Swin models at https://huggingface.co/models?filter=swin
}


class SwinConfig(BackboneConfigMixin, PretrainedConfig):
    """
    Configuration for Swin Transformer models; defaults mirror
    microsoft/swin-tiny-patch4-window7-224.
    """

    model_type = "swin"

    # Map the generic transformer attribute names onto Swin's naming scheme.
    attribute_map = {
        "num_attention_heads": "num_heads",
        "num_hidden_layers": "num_layers",
    }

    def __init__(
        self,
        image_size=224,
        patch_size=4,
        num_channels=3,
        embed_dim=96,
        depths=[2, 2, 6, 2],
        num_heads=[3, 6, 12, 24],
        window_size=7,
        mlp_ratio=4.0,
        qkv_bias=True,
        hidden_dropout_prob=0.0,
        attention_probs_dropout_prob=0.0,
        drop_path_rate=0.1,
        hidden_act="gelu",
        use_absolute_embeddings=False,
        initializer_range=0.02,
        layer_norm_eps=1e-5,
        encoder_stride=32,
        out_features=None,
        out_indices=None,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.embed_dim = embed_dim
        self.depths = depths
        self.num_layers = len(depths)
        self.num_heads = num_heads
        self.window_size = window_size
        self.mlp_ratio = mlp_ratio
        self.qkv_bias = qkv_bias
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.drop_path_rate = drop_path_rate
        self.hidden_act = hidden_act
        self.use_absolute_embeddings = use_absolute_embeddings
        self.layer_norm_eps = layer_norm_eps
        self.initializer_range = initializer_range
        self.encoder_stride = encoder_stride
        # we set the hidden_size attribute in order to make Swin work with VisionEncoderDecoderModel
        # this indicates the channel dimension after the last stage of the model
        self.hidden_size = int(embed_dim * 2 ** (len(depths) - 1))
        self.stage_names = ["stem"] + [f"stage{idx}" for idx in range(1, len(depths) + 1)]
        self._out_features, self._out_indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=out_indices, stage_names=self.stage_names
        )


class SwinOnnxConfig(OnnxConfig):
    # Minimum torch version whose ONNX exporter supports this architecture.
    torch_onnx_minimum_version = version.parse("1.11")

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        # Dynamic axes for ONNX export: NCHW pixel values.
        return OrderedDict(
            [
                ("pixel_values", {0: "batch", 1: "num_channels", 2: "height", 3: "width"}),
            ]
        )

    @property
    def atol_for_validation(self) -> float:
        # Absolute tolerance used when validating exported model outputs.
        return 1e-4
693
1
from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

FUNNEL_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "funnel-transformer/small": "https://huggingface.co/funnel-transformer/small/resolve/main/config.json",
    "funnel-transformer/small-base": "https://huggingface.co/funnel-transformer/small-base/resolve/main/config.json",
    "funnel-transformer/medium": "https://huggingface.co/funnel-transformer/medium/resolve/main/config.json",
    "funnel-transformer/medium-base": "https://huggingface.co/funnel-transformer/medium-base/resolve/main/config.json",
    "funnel-transformer/intermediate": (
        "https://huggingface.co/funnel-transformer/intermediate/resolve/main/config.json"
    ),
    "funnel-transformer/intermediate-base": (
        "https://huggingface.co/funnel-transformer/intermediate-base/resolve/main/config.json"
    ),
    "funnel-transformer/large": "https://huggingface.co/funnel-transformer/large/resolve/main/config.json",
    "funnel-transformer/large-base": "https://huggingface.co/funnel-transformer/large-base/resolve/main/config.json",
    "funnel-transformer/xlarge": "https://huggingface.co/funnel-transformer/xlarge/resolve/main/config.json",
    "funnel-transformer/xlarge-base": "https://huggingface.co/funnel-transformer/xlarge-base/resolve/main/config.json",
}


class FunnelConfig(PretrainedConfig):
    """
    Configuration for Funnel Transformer models. The encoder is described by
    `block_sizes`/`block_repeats`; `num_hidden_layers` and `num_blocks` are
    derived, read-only properties.
    """

    model_type = "funnel"

    attribute_map = {
        "hidden_size": "d_model",
        "num_attention_heads": "n_head",
    }

    def __init__(
        self,
        vocab_size=30522,
        block_sizes=[4, 4, 4],
        block_repeats=None,
        num_decoder_layers=2,
        d_model=768,
        n_head=12,
        d_head=64,
        d_inner=3072,
        hidden_act="gelu_new",
        hidden_dropout=0.1,
        attention_dropout=0.1,
        activation_dropout=0.0,
        initializer_range=0.1,
        initializer_std=None,
        layer_norm_eps=1e-9,
        pooling_type="mean",
        attention_type="relative_shift",
        separate_cls=True,
        truncate_seq=True,
        pool_q_only=True,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.block_sizes = block_sizes
        # Default: each block repeated once.
        self.block_repeats = [1] * len(block_sizes) if block_repeats is None else block_repeats
        assert len(block_sizes) == len(
            self.block_repeats
        ), "`block_sizes` and `block_repeats` should have the same length."
        self.num_decoder_layers = num_decoder_layers
        self.d_model = d_model
        self.n_head = n_head
        self.d_head = d_head
        self.d_inner = d_inner
        self.hidden_act = hidden_act
        self.hidden_dropout = hidden_dropout
        self.attention_dropout = attention_dropout
        self.activation_dropout = activation_dropout
        self.initializer_range = initializer_range
        self.initializer_std = initializer_std
        self.layer_norm_eps = layer_norm_eps
        assert pooling_type in [
            "mean",
            "max",
        ], f"Got {pooling_type} for `pooling_type` but only 'mean' and 'max' are supported."
        self.pooling_type = pooling_type
        assert attention_type in [
            "relative_shift",
            "factorized",
        ], f"Got {attention_type} for `attention_type` but only 'relative_shift' and 'factorized' are supported."
        self.attention_type = attention_type
        self.separate_cls = separate_cls
        self.truncate_seq = truncate_seq
        self.pool_q_only = pool_q_only

        super().__init__(**kwargs)

    @property
    def num_hidden_layers(self):
        # Derived from the block structure; see the setter below.
        return sum(self.block_sizes)

    @num_hidden_layers.setter
    def num_hidden_layers(self, value):
        raise NotImplementedError(
            "This model does not support the setting of `num_hidden_layers`. Please set `block_sizes`."
        )

    @property
    def num_blocks(self):
        return len(self.block_sizes)

    @num_blocks.setter
    def num_blocks(self, value):
        raise NotImplementedError("This model does not support the setting of `num_blocks`. Please set `block_sizes`.")
693
# Dependency version table: maps a pip package name to the full requirement
# specifier pinned for this release. Consumed by setup.py / the dependency
# version check machinery. NOTE(review): the `lowerCAmelCase` name looks
# machine-generated — upstream calls this table `deps`; confirm before renaming.
lowerCAmelCase : dict ={
    'Pillow': 'Pillow<10.0.0',
    'accelerate': 'accelerate>=0.20.3',
    'av': 'av==9.2.0',
    'beautifulsoup4': 'beautifulsoup4',
    'black': 'black~=23.1',
    'codecarbon': 'codecarbon==1.2.0',
    'cookiecutter': 'cookiecutter==1.7.3',
    'dataclasses': 'dataclasses',
    'datasets': 'datasets!=2.5.0',
    'decord': 'decord==0.6.0',
    'deepspeed': 'deepspeed>=0.9.3',
    'diffusers': 'diffusers',
    'dill': 'dill<0.3.5',
    'evaluate': 'evaluate>=0.2.0',
    'fairscale': 'fairscale>0.3',
    'faiss-cpu': 'faiss-cpu',
    'fastapi': 'fastapi',
    'filelock': 'filelock',
    'flax': 'flax>=0.4.1,<=0.7.0',
    'ftfy': 'ftfy',
    'fugashi': 'fugashi>=1.0',
    'GitPython': 'GitPython<3.1.19',
    'hf-doc-builder': 'hf-doc-builder>=0.3.0',
    'huggingface-hub': 'huggingface-hub>=0.14.1,<1.0',
    'importlib_metadata': 'importlib_metadata',
    'ipadic': 'ipadic>=1.0.0,<2.0',
    'isort': 'isort>=5.5.4',
    'jax': 'jax>=0.2.8,!=0.3.2,<=0.4.13',
    'jaxlib': 'jaxlib>=0.1.65,<=0.4.13',
    'jieba': 'jieba',
    'kenlm': 'kenlm',
    'keras-nlp': 'keras-nlp>=0.3.1',
    'librosa': 'librosa',
    'nltk': 'nltk',
    'natten': 'natten>=0.14.6',
    'numpy': 'numpy>=1.17',
    'onnxconverter-common': 'onnxconverter-common',
    'onnxruntime-tools': 'onnxruntime-tools>=1.4.2',
    'onnxruntime': 'onnxruntime>=1.4.0',
    'opencv-python': 'opencv-python',
    'optuna': 'optuna',
    'optax': 'optax>=0.0.8,<=0.1.4',
    'packaging': 'packaging>=20.0',
    'parameterized': 'parameterized',
    'phonemizer': 'phonemizer',
    'protobuf': 'protobuf',
    'psutil': 'psutil',
    'pyyaml': 'pyyaml>=5.1',
    'pydantic': 'pydantic<2',
    'pytest': 'pytest>=7.2.0',
    'pytest-timeout': 'pytest-timeout',
    'pytest-xdist': 'pytest-xdist',
    'python': 'python>=3.8.0',
    'ray[tune]': 'ray[tune]',
    'regex': 'regex!=2019.12.17',
    'requests': 'requests',
    'rhoknp': 'rhoknp>=1.1.0,<1.3.1',
    'rjieba': 'rjieba',
    'rouge-score': 'rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1',
    'ruff': 'ruff>=0.0.241,<=0.0.259',
    'sacrebleu': 'sacrebleu>=1.4.12,<2.0.0',
    'sacremoses': 'sacremoses',
    'safetensors': 'safetensors>=0.3.1',
    'sagemaker': 'sagemaker>=2.31.0',
    'scikit-learn': 'scikit-learn',
    'sentencepiece': 'sentencepiece>=0.1.91,!=0.1.92',
    'sigopt': 'sigopt',
    'starlette': 'starlette',
    'sudachipy': 'sudachipy>=0.6.6',
    'sudachidict_core': 'sudachidict_core>=20220729',
    'tensorflow-cpu': 'tensorflow-cpu>=2.6,<2.14',
    'tensorflow': 'tensorflow>=2.6,<2.14',
    'tensorflow-text': 'tensorflow-text<2.14',
    'tf2onnx': 'tf2onnx',
    'timeout-decorator': 'timeout-decorator',
    'timm': 'timm',
    'tokenizers': 'tokenizers>=0.11.1,!=0.11.3,<0.14',
    'torch': 'torch>=1.9,!=1.12.0',
    'torchaudio': 'torchaudio',
    'torchvision': 'torchvision',
    'pyctcdecode': 'pyctcdecode>=0.4.0',
    'tqdm': 'tqdm>=4.27',
    'unidic': 'unidic>=1.0.2',
    'unidic_lite': 'unidic_lite>=1.0.7',
    'urllib3': 'urllib3<2.0.0',
    'uvicorn': 'uvicorn',
}
693
1
import unittest

from transformers import GPTNeoXJapaneseConfig, is_torch_available
from transformers.models.gpt_neox_japanese.tokenization_gpt_neox_japanese import GPTNeoXJapaneseTokenizer
from transformers.testing_utils import require_torch, slow, torch_device

from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, ids_tensor, random_attention_mask
from ...test_pipeline_mixin import PipelineTesterMixin


if is_torch_available():
    import torch

    from transformers import GPTNeoXJapaneseForCausalLM, GPTNeoXJapaneseModel


class GPTNeoXJapaneseModelTester:
    """Builds tiny GPT-NeoX-Japanese configs/inputs and runs shape checks."""

    def __init__(
        self,
        parent,
        batch_size=13,
        seq_length=7,
        is_training=True,
        use_input_mask=True,
        use_token_type_ids=True,
        use_labels=True,
        vocab_size=99,
        hidden_size=32,
        num_hidden_layers=5,
        num_attention_heads=4,
        intermediate_multiple_size=4,
        hidden_act="gelu",
        hidden_dropout=0.0,
        attention_dropout=0.1,
        weight_tying=True,
        max_position_embeddings=512,
        type_vocab_size=16,
        type_sequence_label_size=2,
        initializer_range=0.02,
        num_labels=3,
        num_choices=4,
        scope=None,
    ):
        self.parent = parent
        self.batch_size = batch_size
        self.seq_length = seq_length
        self.is_training = is_training
        self.use_input_mask = use_input_mask
        self.use_token_type_ids = use_token_type_ids
        self.use_labels = use_labels
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_multiple_size = intermediate_multiple_size
        self.hidden_act = hidden_act
        self.hidden_dropout = hidden_dropout
        self.attention_dropout = attention_dropout
        self.weight_tying = weight_tying
        self.max_position_embeddings = max_position_embeddings
        self.type_vocab_size = type_vocab_size
        self.type_sequence_label_size = type_sequence_label_size
        self.initializer_range = initializer_range
        self.num_labels = num_labels
        self.num_choices = num_choices
        self.scope = scope

    def prepare_config_and_inputs(self):
        input_ids = ids_tensor([self.batch_size, self.seq_length], self.vocab_size)

        input_mask = None
        if self.use_input_mask:
            input_mask = random_attention_mask([self.batch_size, self.seq_length])

        token_labels = None
        if self.use_labels:
            token_labels = ids_tensor([self.batch_size, self.seq_length], self.num_labels)

        config = self.get_config()

        return config, input_ids, input_mask, token_labels

    def get_config(self):
        return GPTNeoXJapaneseConfig(
            vocab_size=self.vocab_size,
            hidden_size=self.hidden_size,
            num_hidden_layers=self.num_hidden_layers,
            num_attention_heads=self.num_attention_heads,
            intermediate_multiple_size=self.intermediate_multiple_size,
            hidden_act=self.hidden_act,
            hidden_dropout=self.hidden_dropout,
            attention_dropout=self.attention_dropout,
            weight_tying=self.weight_tying,
            max_position_embeddings=self.max_position_embeddings,
            type_vocab_size=self.type_vocab_size,
            is_decoder=False,
            initializer_range=self.initializer_range,
        )

    def prepare_config_and_inputs_for_decoder(self):
        config, input_ids, input_mask, token_labels = self.prepare_config_and_inputs()
        config.is_decoder = True
        return config, input_ids, input_mask, token_labels

    def create_and_check_model(self, config, input_ids, input_mask):
        model = GPTNeoXJapaneseModel(config=config)
        model.to(torch_device)
        model.eval()
        _ = model(input_ids, attention_mask=input_mask)
        result = model(input_ids)
        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))

    def create_and_check_model_as_decoder(self, config, input_ids, input_mask):
        config.add_cross_attention = True
        model = GPTNeoXJapaneseModel(config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, attention_mask=input_mask)
        self.parent.assertEqual(result.last_hidden_state.shape, (self.batch_size, self.seq_length, self.hidden_size))

    def create_and_check_for_causal_lm(self, config, input_ids, input_mask, token_labels):
        model = GPTNeoXJapaneseForCausalLM(config=config)
        model.to(torch_device)
        model.eval()
        result = model(input_ids, attention_mask=input_mask, labels=input_ids)
        self.parent.assertEqual(result.logits.shape, (self.batch_size, self.seq_length, self.vocab_size))

    def create_and_check_decoder_model_past_large_inputs(self, config, input_ids, input_mask):
        config.is_decoder = True
        model = GPTNeoXJapaneseForCausalLM(config=config)
        model.to(torch_device)
        model.eval()

        # first forward pass
        outputs = model(input_ids, attention_mask=input_mask, use_cache=True)
        past_key_values = outputs.past_key_values

        # create hypothetical multiple next token and extent to next_input_ids
        next_tokens = ids_tensor((self.batch_size, 3), config.vocab_size)
        next_mask = ids_tensor((self.batch_size, 3), vocab_size=2)

        # append to next input_ids and
        next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)
        next_attention_mask = torch.cat([input_mask, next_mask], dim=-1)

        output_from_no_past = model(next_input_ids, attention_mask=next_attention_mask, output_hidden_states=True)
        output_from_no_past = output_from_no_past["hidden_states"][0]
        output_from_past = model(
            next_tokens,
            attention_mask=next_attention_mask,
            past_key_values=past_key_values,
            output_hidden_states=True,
        )["hidden_states"][0]

        # select random slice
        random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
        output_from_no_past_slice = output_from_no_past[:, -3:, random_slice_idx].detach()
        output_from_past_slice = output_from_past[:, :, random_slice_idx].detach()

        self.parent.assertTrue(output_from_past_slice.shape[1] == next_tokens.shape[1])

        # test that outputs are equal for slice
        self.parent.assertTrue(torch.allclose(output_from_no_past_slice, output_from_past_slice, atol=1e-3))

    def prepare_config_and_inputs_for_common(self):
        config_and_inputs = self.prepare_config_and_inputs()
        config, input_ids, input_mask, token_labels = config_and_inputs
        inputs_dict = {"input_ids": input_ids, "attention_mask": input_mask}
        return config, inputs_dict


@require_torch
class GPTNeoXJapaneseModelTest(ModelTesterMixin, PipelineTesterMixin, unittest.TestCase):
    all_model_classes = (GPTNeoXJapaneseModel, GPTNeoXJapaneseForCausalLM) if is_torch_available() else ()
    all_generative_model_classes = (GPTNeoXJapaneseForCausalLM,) if is_torch_available() else ()
    pipeline_model_mapping = (
        {"feature-extraction": GPTNeoXJapaneseModel, "text-generation": GPTNeoXJapaneseForCausalLM}
        if is_torch_available()
        else {}
    )
    test_pruning = False
    test_missing_keys = False
    test_model_parallel = False
    test_head_masking = False

    def setUp(self):
        self.model_tester = GPTNeoXJapaneseModelTester(self)
        self.config_tester = ConfigTester(self, config_class=GPTNeoXJapaneseConfig, hidden_size=37)

    def test_config(self):
        self.config_tester.run_common_tests()

    def test_model(self):
        config, input_ids, input_mask, token_labels = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_model(config, input_ids, input_mask)

    def test_model_as_decoder(self):
        config, input_ids, input_mask, token_labels = self.model_tester.prepare_config_and_inputs_for_decoder()
        self.model_tester.create_and_check_model_as_decoder(config, input_ids, input_mask)

    def test_model_as_decoder_with_default_input_mask(self):
        # This regression test was failing with PyTorch < 1.3
        config, input_ids, input_mask, token_labels = self.model_tester.prepare_config_and_inputs_for_decoder()
        input_mask = None
        self.model_tester.create_and_check_model_as_decoder(config, input_ids, input_mask)

    def test_decoder_model_past_large_inputs(self):
        config, input_ids, input_mask, token_labels = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_decoder_model_past_large_inputs(config, input_ids, input_mask)

    def test_model_for_causal_lm(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_for_causal_lm(*config_and_inputs)

    @slow
    def test_generation(self):
        model_id = "abeja/gpt-neox-japanese-2.7b"
        prompts = ["データサイエンティストとは、", "100年後に必要とされる会社は、", "フルリモートの環境で働くために必要なことは、", "国境の長いトンネルを抜けると", "美味しい日本食といえば、"]
        EXPECTED_OUTPUTS = [
            "データサイエンティストとは、データを分析し、ビジネスに役立つ知見を導き出す専門家のことです。",
            "100年後に必要とされる会社は、「人」が中心の会社です。",
            "フルリモートの環境で働くために必要なことは、「自分の時間をコントロールする」ことです。",
            "国境の長いトンネルを抜けると、そこは雪国だった。",
            "美味しい日本食といえば、やっぱりお寿司ですよね。",
        ]

        tokenizer = GPTNeoXJapaneseTokenizer.from_pretrained(model_id)
        model = GPTNeoXJapaneseForCausalLM.from_pretrained(model_id)

        predicted_outputs = []
        for prompt in prompts:
            input_ids = tokenizer(prompt, return_tensors="pt").input_ids
            generated_ids = model.generate(input_ids, max_length=50)
            generated_string = tokenizer.batch_decode(generated_ids, skip_special_tokens=True)
            predicted_outputs += generated_string
        self.assertListEqual(predicted_outputs, EXPECTED_OUTPUTS)
693
# Lazy-import bootstrap for the RoFormer model family (flattened chunk of
# transformers/models/roformer/__init__.py).
#
# NOTE(review): in the flattened original, the import-structure dict and every
# optional-backend extension were bound to obfuscated throwaway names, so the
# `_import_structure` passed to `_LazyModule` at the bottom was undefined and
# the module object in `sys.modules` was never replaced.  Restored the
# canonical transformers lazy-module pattern that the tail of the file
# already references.
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_flax_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


# Maps submodule name -> public symbols it exports; `_LazyModule` imports a
# submodule only when one of its symbols is first accessed.
_import_structure = {
    "configuration_roformer": ["ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "RoFormerConfig", "RoFormerOnnxConfig"],
    "tokenization_roformer": ["RoFormerTokenizer"],
}

# Each optional backend registers its symbols only when importable; a missing
# backend silently contributes nothing.
try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_roformer_fast"] = ["RoFormerTokenizerFast"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_roformer"] = [
        "ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "RoFormerForCausalLM",
        "RoFormerForMaskedLM",
        "RoFormerForMultipleChoice",
        "RoFormerForQuestionAnswering",
        "RoFormerForSequenceClassification",
        "RoFormerForTokenClassification",
        "RoFormerLayer",
        "RoFormerModel",
        "RoFormerPreTrainedModel",
        "load_tf_weights_in_roformer",
    ]

try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_tf_roformer"] = [
        "TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFRoFormerForCausalLM",
        "TFRoFormerForMaskedLM",
        "TFRoFormerForMultipleChoice",
        "TFRoFormerForQuestionAnswering",
        "TFRoFormerForSequenceClassification",
        "TFRoFormerForTokenClassification",
        "TFRoFormerLayer",
        "TFRoFormerModel",
        "TFRoFormerPreTrainedModel",
    ]

try:
    if not is_flax_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_flax_roformer"] = [
        "FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "FlaxRoFormerForMaskedLM",
        "FlaxRoFormerForMultipleChoice",
        "FlaxRoFormerForQuestionAnswering",
        "FlaxRoFormerForSequenceClassification",
        "FlaxRoFormerForTokenClassification",
        "FlaxRoFormerModel",
        "FlaxRoFormerPreTrainedModel",
    ]

if TYPE_CHECKING:
    # Static imports for type checkers only; must mirror _import_structure.
    from .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, RoFormerConfig, RoFormerOnnxConfig
    from .tokenization_roformer import RoFormerTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_roformer_fast import RoFormerTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_roformer import (
            ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            RoFormerForCausalLM,
            RoFormerForMaskedLM,
            RoFormerForMultipleChoice,
            RoFormerForQuestionAnswering,
            RoFormerForSequenceClassification,
            RoFormerForTokenClassification,
            RoFormerLayer,
            RoFormerModel,
            RoFormerPreTrainedModel,
            load_tf_weights_in_roformer,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_roformer import (
            TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFRoFormerForCausalLM,
            TFRoFormerForMaskedLM,
            TFRoFormerForMultipleChoice,
            TFRoFormerForQuestionAnswering,
            TFRoFormerForSequenceClassification,
            TFRoFormerForTokenClassification,
            TFRoFormerLayer,
            TFRoFormerModel,
            TFRoFormerPreTrainedModel,
        )

    try:
        if not is_flax_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_flax_roformer import (
            FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            FlaxRoFormerForMaskedLM,
            FlaxRoFormerForMultipleChoice,
            FlaxRoFormerForQuestionAnswering,
            FlaxRoFormerForSequenceClassification,
            FlaxRoFormerForTokenClassification,
            FlaxRoFormerModel,
            FlaxRoFormerPreTrainedModel,
        )

else:
    import sys

    # Replace this module with a lazy proxy so heavy backends load on demand.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
693
1
# NOTE(review): flattened dump of tests/models/albert/test_tokenization_albert.py
# with newlines collapsed and identifiers obfuscated (lowerCAmelCase /
# lowercase_ placeholders) — not valid Python as-is.  Covers: setUp with the
# SentencePiece fixture, vocab/token-id round trips, slow-vs-rust tokenizer
# agreement, full tokenization with and without accents, special-token
# sequence building, and a @slow integration check of exact encodings for
# albert-base-v2 (the large attention_mask / input_ids / token_type_ids
# literals are the expected fixture and must not be altered).  Left
# byte-identical: restoring requires the original line structure.
import unittest from transformers import AlbertTokenizer, AlbertTokenizerFast from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow from ...test_tokenization_common import TokenizerTesterMixin lowerCAmelCase : List[Any] =get_tests_dir('fixtures/spiece.model') @require_sentencepiece @require_tokenizers class _a ( snake_case_ , unittest.TestCase ): _UpperCamelCase: Tuple = AlbertTokenizer _UpperCamelCase: Any = AlbertTokenizerFast _UpperCamelCase: List[Any] = True _UpperCamelCase: Optional[int] = True _UpperCamelCase: Optional[int] = True def _snake_case ( self ) -> List[Any]: super().setUp() # We have a SentencePiece fixture for testing lowerCAmelCase : List[str] = AlbertTokenizer(lowercase_ ) tokenizer.save_pretrained(self.tmpdirname ) def _snake_case ( self , lowercase_ ) -> List[Any]: lowerCAmelCase : Union[str, Any] = """this is a test""" lowerCAmelCase : str = """this is a test""" return input_text, output_text def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase : List[str] = """<pad>""" lowerCAmelCase : str = 0 self.assertEqual(self.get_tokenizer()._convert_token_to_id(lowercase_ ) , lowercase_ ) self.assertEqual(self.get_tokenizer()._convert_id_to_token(lowercase_ ) , lowercase_ ) def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase : Optional[int] = list(self.get_tokenizer().get_vocab().keys() ) self.assertEqual(vocab_keys[0] , """<pad>""" ) self.assertEqual(vocab_keys[1] , """<unk>""" ) self.assertEqual(vocab_keys[-1] , """▁eloquent""" ) self.assertEqual(len(lowercase_ ) , 30000 ) def _snake_case ( self ) -> Union[str, Any]: self.assertEqual(self.get_tokenizer().vocab_size , 30000 ) def _snake_case ( self ) -> str: if not self.test_rust_tokenizer: return lowerCAmelCase : Union[str, Any] = self.get_tokenizer() lowerCAmelCase : Any = self.get_rust_tokenizer() lowerCAmelCase : List[Any] = """I was born in 92000, and this is falsé.""" lowerCAmelCase : Tuple = tokenizer.tokenize(lowercase_ ) 
lowerCAmelCase : List[Any] = rust_tokenizer.tokenize(lowercase_ ) self.assertListEqual(lowercase_ , lowercase_ ) lowerCAmelCase : Dict = tokenizer.encode(lowercase_ , add_special_tokens=lowercase_ ) lowerCAmelCase : List[str] = rust_tokenizer.encode(lowercase_ , add_special_tokens=lowercase_ ) self.assertListEqual(lowercase_ , lowercase_ ) lowerCAmelCase : List[str] = self.get_rust_tokenizer() lowerCAmelCase : Any = tokenizer.encode(lowercase_ ) lowerCAmelCase : Dict = rust_tokenizer.encode(lowercase_ ) self.assertListEqual(lowercase_ , lowercase_ ) def _snake_case ( self ) -> List[Any]: lowerCAmelCase : Dict = AlbertTokenizer(lowercase_ , keep_accents=lowercase_ ) lowerCAmelCase : Union[str, Any] = tokenizer.tokenize("""This is a test""" ) self.assertListEqual(lowercase_ , ["""▁this""", """▁is""", """▁a""", """▁test"""] ) self.assertListEqual(tokenizer.convert_tokens_to_ids(lowercase_ ) , [48, 25, 21, 1289] ) lowerCAmelCase : Dict = tokenizer.tokenize("""I was born in 92000, and this is falsé.""" ) self.assertListEqual( lowercase_ , ["""▁i""", """▁was""", """▁born""", """▁in""", """▁9""", """2000""", """,""", """▁and""", """▁this""", """▁is""", """▁fal""", """s""", """é""", """."""] ) lowerCAmelCase : Dict = tokenizer.convert_tokens_to_ids(lowercase_ ) self.assertListEqual(lowercase_ , [31, 23, 386, 19, 561, 3050, 15, 17, 48, 25, 8256, 18, 1, 9] ) lowerCAmelCase : List[str] = tokenizer.convert_ids_to_tokens(lowercase_ ) self.assertListEqual( lowercase_ , ["""▁i""", """▁was""", """▁born""", """▁in""", """▁9""", """2000""", """,""", """▁and""", """▁this""", """▁is""", """▁fal""", """s""", """<unk>""", """."""] , ) def _snake_case ( self ) -> str: lowerCAmelCase : str = AlbertTokenizer(lowercase_ ) lowerCAmelCase : Optional[int] = tokenizer.encode("""sequence builders""" ) lowerCAmelCase : Tuple = tokenizer.encode("""multi-sequence build""" ) lowerCAmelCase : Dict = tokenizer.build_inputs_with_special_tokens(lowercase_ ) lowerCAmelCase : Tuple = 
tokenizer.build_inputs_with_special_tokens(lowercase_ , lowercase_ ) assert encoded_sentence == [tokenizer.cls_token_id] + text + [tokenizer.sep_token_id] assert encoded_pair == [tokenizer.cls_token_id] + text + [tokenizer.sep_token_id] + text_a + [ tokenizer.sep_token_id ] @slow def _snake_case ( self ) -> Optional[int]: # fmt: off lowerCAmelCase : Optional[int] = {"""attention_mask""": [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], """input_ids""": [[2, 21970, 13, 5, 6092, 167, 28, 7103, 2153, 673, 8, 7028, 12051, 18, 17, 7103, 2153, 673, 8, 3515, 18684, 8, 4461, 6, 1927, 297, 8, 12060, 2607, 18, 13, 5, 4461, 15, 10538, 38, 8, 135, 15, 822, 58, 15, 993, 10363, 15, 1460, 8005, 4461, 15, 993, 255, 2328, 9, 9, 9, 6, 26, 1112, 816, 3260, 13, 5, 103, 2377, 6, 17, 1112, 816, 2782, 13, 5, 103, 10641, 6, 29, 84, 2512, 2430, 782, 18684, 2761, 19, 808, 2430, 2556, 17, 855, 1480, 9477, 4091, 128, 11712, 15, 7103, 2153, 673, 17, 24883, 9990, 9, 3], [2, 11502, 25, 1006, 20, 782, 8, 11809, 855, 1732, 19393, 18667, 37, 367, 21018, 69, 1854, 34, 11860, 19124, 27, 156, 225, 17, 193, 4141, 19, 65, 9124, 9, 3, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [2, 14, 2231, 886, 2385, 17659, 84, 14, 16792, 1952, 9, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], """token_type_ids""": [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]} # noqa: E501 # fmt: on self.tokenizer_integration_test_util( expected_encoding=lowercase_ , model_name="""albert-base-v2""" , revision="""6b6560eaf5ff2e250b00c50f380c5389a9c2d82e""" , )
693
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' return int(input_a == input_a == 0 ) def _UpperCAmelCase ( ): '''simple docstring''' print("""Truth Table of NOR Gate:""" ) print("""| Input 1 | Input 2 | Output |""" ) print(F"""| 0 | 0 | {nor_gate(0 ,0 )} |""" ) print(F"""| 0 | 1 | {nor_gate(0 ,1 )} |""" ) print(F"""| 1 | 0 | {nor_gate(1 ,0 )} |""" ) print(F"""| 1 | 1 | {nor_gate(1 ,1 )} |""" ) if __name__ == "__main__": import doctest doctest.testmod() main()
693
1
from __future__ import annotations def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' if len(SCREAMING_SNAKE_CASE__ ) <= 1 or n <= 1: return insert_next(SCREAMING_SNAKE_CASE__ ,n - 1 ) rec_insertion_sort(SCREAMING_SNAKE_CASE__ ,n - 1 ) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' if index >= len(SCREAMING_SNAKE_CASE__ ) or collection[index - 1] <= collection[index]: return # Swaps adjacent elements since they are not in ascending order lowerCAmelCase , lowerCAmelCase : Dict = ( collection[index], collection[index - 1], ) insert_next(SCREAMING_SNAKE_CASE__ ,index + 1 ) if __name__ == "__main__": lowerCAmelCase : int =input('Enter integers separated by spaces: ') lowerCAmelCase : list[int] =[int(num) for num in numbers.split()] rec_insertion_sort(number_list, len(number_list)) print(number_list)
693
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available lowerCAmelCase : int ={ 'configuration_poolformer': [ 'POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'PoolFormerConfig', 'PoolFormerOnnxConfig', ] } try: if not is_vision_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : List[Any] =['PoolFormerFeatureExtractor'] lowerCAmelCase : List[str] =['PoolFormerImageProcessor'] try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : Tuple =[ 'POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST', 'PoolFormerForImageClassification', 'PoolFormerModel', 'PoolFormerPreTrainedModel', ] if TYPE_CHECKING: from .configuration_poolformer import ( POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, PoolFormerConfig, PoolFormerOnnxConfig, ) try: if not is_vision_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .feature_extraction_poolformer import PoolFormerFeatureExtractor from .image_processing_poolformer import PoolFormerImageProcessor try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_poolformer import ( POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST, PoolFormerForImageClassification, PoolFormerModel, PoolFormerPreTrainedModel, ) else: import sys lowerCAmelCase : Any =_LazyModule(__name__, globals()['__file__'], _import_structure)
693
1
# NOTE(review): flattened dump of tests/utils/test_backbone_utils.py with
# newlines collapsed and result names obfuscated (lowerCAmelCase / lowercase_
# placeholders) — not valid Python as-is.  Covers: alignment of
# out_features/out_indices defaults (last layer when both None, matching the
# other when one is given, negative-index selection), the
# verify_out_features_out_indices error cases (type, subset, length, order),
# and BackboneMixin property round-trips.  The exact assertion targets were
# lost to obfuscation, so the body is left byte-identical rather than guessed.
import unittest from transformers.utils.backbone_utils import ( BackboneMixin, get_aligned_output_features_output_indices, verify_out_features_out_indices, ) class _a ( unittest.TestCase ): def _snake_case ( self ) -> str: lowerCAmelCase : Any = ["""a""", """b""", """c"""] # Defaults to last layer if both are None lowerCAmelCase , lowerCAmelCase : Optional[Any] = get_aligned_output_features_output_indices(lowercase_ , lowercase_ , lowercase_ ) self.assertEqual(lowercase_ , ["""c"""] ) self.assertEqual(lowercase_ , [2] ) # Out indices set to match out features lowerCAmelCase , lowerCAmelCase : Any = get_aligned_output_features_output_indices(["""a""", """c"""] , lowercase_ , lowercase_ ) self.assertEqual(lowercase_ , ["""a""", """c"""] ) self.assertEqual(lowercase_ , [0, 2] ) # Out features set to match out indices lowerCAmelCase , lowerCAmelCase : Any = get_aligned_output_features_output_indices(lowercase_ , [0, 2] , lowercase_ ) self.assertEqual(lowercase_ , ["""a""", """c"""] ) self.assertEqual(lowercase_ , [0, 2] ) # Out features selected from negative indices lowerCAmelCase , lowerCAmelCase : Tuple = get_aligned_output_features_output_indices(lowercase_ , [-3, -1] , lowercase_ ) self.assertEqual(lowercase_ , ["""a""", """c"""] ) self.assertEqual(lowercase_ , [-3, -1] ) def _snake_case ( self ) -> Union[str, Any]: # Stage names must be set with self.assertRaises(lowercase_ ): verify_out_features_out_indices(["""a""", """b"""] , (0, 1) , lowercase_ ) # Out features must be a list with self.assertRaises(lowercase_ ): verify_out_features_out_indices(("""a""", """b""") , (0, 1) , ["""a""", """b"""] ) # Out features must be a subset of stage names with self.assertRaises(lowercase_ ): verify_out_features_out_indices(["""a""", """b"""] , (0, 1) , ["""a"""] ) # Out indices must be a list or tuple with self.assertRaises(lowercase_ ): verify_out_features_out_indices(lowercase_ , 0 , ["""a""", """b"""] ) # Out indices must be a subset of stage names with 
self.assertRaises(lowercase_ ): verify_out_features_out_indices(lowercase_ , (0, 1) , ["""a"""] ) # Out features and out indices must be the same length with self.assertRaises(lowercase_ ): verify_out_features_out_indices(["""a""", """b"""] , (0,) , ["""a""", """b""", """c"""] ) # Out features should match out indices with self.assertRaises(lowercase_ ): verify_out_features_out_indices(["""a""", """b"""] , (0, 2) , ["""a""", """b""", """c"""] ) # Out features and out indices should be in order with self.assertRaises(lowercase_ ): verify_out_features_out_indices(["""b""", """a"""] , (0, 1) , ["""a""", """b"""] ) # Check passes with valid inputs verify_out_features_out_indices(["""a""", """b""", """d"""] , (0, 1, -1) , ["""a""", """b""", """c""", """d"""] ) def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase : Any = BackboneMixin() lowerCAmelCase : Optional[Any] = ["""a""", """b""", """c"""] lowerCAmelCase : Optional[int] = ["""a""", """c"""] lowerCAmelCase : int = [0, 2] # Check that the output features and indices are set correctly self.assertEqual(backbone.out_features , ["""a""", """c"""] ) self.assertEqual(backbone.out_indices , [0, 2] ) # Check out features and indices are updated correctly lowerCAmelCase : List[Any] = ["""a""", """b"""] self.assertEqual(backbone.out_features , ["""a""", """b"""] ) self.assertEqual(backbone.out_indices , [0, 1] ) lowerCAmelCase : Optional[int] = [-3, -1] self.assertEqual(backbone.out_features , ["""a""", """c"""] ) self.assertEqual(backbone.out_indices , [-3, -1] )
693
# NOTE(review): flattened dump of accelerate's terminal keymap helpers
# (keymap constants plus get_raw_chars/get_character) with newlines collapsed
# and several assignment targets obfuscated — not valid Python as-is.
# Notably, the KEYMAP["arrow_begin"] / KEYMAP["arrow_end"] entries (referenced
# by the range check near the end) and the Windows encoding/buffer names were
# lost to obfuscation.  The logic is platform-dependent raw-terminal I/O
# (msvcrt on Windows, termios/tty on POSIX) with exact statement ordering
# (tcgetattr -> setraw -> read -> tcsetattr in finally), so it is left
# byte-identical rather than reconstructed by guesswork.
import os import string import sys lowerCAmelCase : Optional[int] =1 << 8 lowerCAmelCase : List[Any] ={ 'tab': ord('\t'), 'newline': ord('\r'), 'esc': 27, 'up': 65 + ARROW_KEY_FLAG, 'down': 66 + ARROW_KEY_FLAG, 'right': 67 + ARROW_KEY_FLAG, 'left': 68 + ARROW_KEY_FLAG, 'mod_int': 91, 'undefined': sys.maxsize, 'interrupt': 3, 'insert': 50, 'delete': 51, 'pg_up': 53, 'pg_down': 54, } lowerCAmelCase : Optional[Any] =KEYMAP['up'] lowerCAmelCase : Tuple =KEYMAP['left'] if sys.platform == "win32": lowerCAmelCase : Dict =[] lowerCAmelCase : int ={ b'\xe0H': KEYMAP['up'] - ARROW_KEY_FLAG, b'\x00H': KEYMAP['up'] - ARROW_KEY_FLAG, b'\xe0P': KEYMAP['down'] - ARROW_KEY_FLAG, b'\x00P': KEYMAP['down'] - ARROW_KEY_FLAG, b'\xe0M': KEYMAP['right'] - ARROW_KEY_FLAG, b'\x00M': KEYMAP['right'] - ARROW_KEY_FLAG, b'\xe0K': KEYMAP['left'] - ARROW_KEY_FLAG, b'\x00K': KEYMAP['left'] - ARROW_KEY_FLAG, } for i in range(10): lowerCAmelCase : Optional[Any] =ord(str(i)) def _UpperCAmelCase ( ): '''simple docstring''' if os.name == "nt": import msvcrt lowerCAmelCase : Any = """mbcs""" # Flush the keyboard buffer while msvcrt.kbhit(): msvcrt.getch() if len(SCREAMING_SNAKE_CASE__ ) == 0: # Read the keystroke lowerCAmelCase : int = msvcrt.getch() # If it is a prefix char, get second part if ch in (b"\x00", b"\xe0"): lowerCAmelCase : Tuple = ch + msvcrt.getch() # Translate actual Win chars to bullet char types try: lowerCAmelCase : str = chr(WIN_KEYMAP[cha] ) WIN_CH_BUFFER.append(chr(KEYMAP["""mod_int"""] ) ) WIN_CH_BUFFER.append(SCREAMING_SNAKE_CASE__ ) if ord(SCREAMING_SNAKE_CASE__ ) in ( KEYMAP["insert"] - 1 << 9, KEYMAP["delete"] - 1 << 9, KEYMAP["pg_up"] - 1 << 9, KEYMAP["pg_down"] - 1 << 9, ): WIN_CH_BUFFER.append(chr(1_2_6 ) ) lowerCAmelCase : Optional[Any] = chr(KEYMAP["""esc"""] ) except KeyError: lowerCAmelCase : Optional[int] = cha[1] else: lowerCAmelCase : Any = ch.decode(SCREAMING_SNAKE_CASE__ ) else: lowerCAmelCase : Optional[int] = WIN_CH_BUFFER.pop(0 ) elif os.name == "posix": import 
termios import tty lowerCAmelCase : List[Any] = sys.stdin.fileno() lowerCAmelCase : str = termios.tcgetattr(SCREAMING_SNAKE_CASE__ ) try: tty.setraw(SCREAMING_SNAKE_CASE__ ) lowerCAmelCase : Optional[Any] = sys.stdin.read(1 ) finally: termios.tcsetattr(SCREAMING_SNAKE_CASE__ ,termios.TCSADRAIN ,SCREAMING_SNAKE_CASE__ ) return ch def _UpperCAmelCase ( ): '''simple docstring''' lowerCAmelCase : Any = get_raw_chars() if ord(SCREAMING_SNAKE_CASE__ ) in [KEYMAP["interrupt"], KEYMAP["newline"]]: return char elif ord(SCREAMING_SNAKE_CASE__ ) == KEYMAP["esc"]: lowerCAmelCase : int = get_raw_chars() if ord(SCREAMING_SNAKE_CASE__ ) == KEYMAP["mod_int"]: lowerCAmelCase : Tuple = get_raw_chars() if ord(SCREAMING_SNAKE_CASE__ ) >= KEYMAP["arrow_begin"] - ARROW_KEY_FLAG and ord(SCREAMING_SNAKE_CASE__ ) <= KEYMAP["arrow_end"] - ARROW_KEY_FLAG: return chr(ord(SCREAMING_SNAKE_CASE__ ) + ARROW_KEY_FLAG ) else: return KEYMAP["undefined"] else: return get_raw_chars() else: if char in string.printable: return char else: return KEYMAP["undefined"]
693
1
# NOTE(review): flattened dump of tests/models/auto/test_processor_auto.py
# with newlines collapsed and identifiers obfuscated — not valid Python
# as-is.  Covers AutoProcessor resolution: from hub repo, from saved
# config+processor dirs, with processor_class dropped from tokenizer/feature
# extractor configs, trust_remote_code on/off (NewProcessor /
# NewFeatureExtractor / NewTokenizer[Fast]), custom register/unregister via
# CONFIG/FEATURE_EXTRACTOR/TOKENIZER/PROCESSOR mappings, fallbacks to
# BertTokenizerFast / ConvNextImageProcessor, and the start of an
# @is_staging_test push-to-hub suite whose final method continues past this
# chunk.  Left byte-identical: restoration needs the original line structure.
import json import os import sys import tempfile import unittest from pathlib import Path from shutil import copyfile from huggingface_hub import HfFolder, Repository, create_repo, delete_repo from requests.exceptions import HTTPError import transformers from transformers import ( CONFIG_MAPPING, FEATURE_EXTRACTOR_MAPPING, PROCESSOR_MAPPING, TOKENIZER_MAPPING, AutoConfig, AutoFeatureExtractor, AutoProcessor, AutoTokenizer, BertTokenizer, ProcessorMixin, WavaVecaConfig, WavaVecaFeatureExtractor, WavaVecaProcessor, ) from transformers.testing_utils import TOKEN, USER, get_tests_dir, is_staging_test from transformers.tokenization_utils import TOKENIZER_CONFIG_FILE from transformers.utils import FEATURE_EXTRACTOR_NAME, is_tokenizers_available sys.path.append(str(Path(__file__).parent.parent.parent.parent / 'utils')) from test_module.custom_configuration import CustomConfig # noqa E402 from test_module.custom_feature_extraction import CustomFeatureExtractor # noqa E402 from test_module.custom_processing import CustomProcessor # noqa E402 from test_module.custom_tokenization import CustomTokenizer # noqa E402 lowerCAmelCase : Optional[int] =get_tests_dir('fixtures/dummy_feature_extractor_config.json') lowerCAmelCase : Any =get_tests_dir('fixtures/vocab.json') lowerCAmelCase : Union[str, Any] =get_tests_dir('fixtures') class _a ( unittest.TestCase ): _UpperCamelCase: Optional[Any] = ["[UNK]", "[CLS]", "[SEP]", "[PAD]", "[MASK]", "bla", "blou"] def _snake_case ( self ) -> List[Any]: lowerCAmelCase : Optional[Any] = 0 def _snake_case ( self ) -> Dict: lowerCAmelCase : Optional[Any] = AutoProcessor.from_pretrained("""facebook/wav2vec2-base-960h""" ) self.assertIsInstance(lowercase_ , lowercase_ ) def _snake_case ( self ) -> int: with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : Tuple = WavaVecaConfig() lowerCAmelCase : Dict = AutoProcessor.from_pretrained("""facebook/wav2vec2-base-960h""" ) # save in new folder model_config.save_pretrained(lowercase_ ) 
processor.save_pretrained(lowercase_ ) lowerCAmelCase : List[Any] = AutoProcessor.from_pretrained(lowercase_ ) self.assertIsInstance(lowercase_ , lowercase_ ) def _snake_case ( self ) -> Any: with tempfile.TemporaryDirectory() as tmpdirname: # copy relevant files copyfile(lowercase_ , os.path.join(lowercase_ , lowercase_ ) ) copyfile(lowercase_ , os.path.join(lowercase_ , """vocab.json""" ) ) lowerCAmelCase : Tuple = AutoProcessor.from_pretrained(lowercase_ ) self.assertIsInstance(lowercase_ , lowercase_ ) def _snake_case ( self ) -> List[Any]: with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : List[Any] = WavaVecaFeatureExtractor() lowerCAmelCase : Optional[int] = AutoTokenizer.from_pretrained("""facebook/wav2vec2-base-960h""" ) lowerCAmelCase : Tuple = WavaVecaProcessor(lowercase_ , lowercase_ ) # save in new folder processor.save_pretrained(lowercase_ ) # drop `processor_class` in tokenizer with open(os.path.join(lowercase_ , lowercase_ ) , """r""" ) as f: lowerCAmelCase : Optional[Any] = json.load(lowercase_ ) config_dict.pop("""processor_class""" ) with open(os.path.join(lowercase_ , lowercase_ ) , """w""" ) as f: f.write(json.dumps(lowercase_ ) ) lowerCAmelCase : int = AutoProcessor.from_pretrained(lowercase_ ) self.assertIsInstance(lowercase_ , lowercase_ ) def _snake_case ( self ) -> List[str]: with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : Tuple = WavaVecaFeatureExtractor() lowerCAmelCase : int = AutoTokenizer.from_pretrained("""facebook/wav2vec2-base-960h""" ) lowerCAmelCase : Dict = WavaVecaProcessor(lowercase_ , lowercase_ ) # save in new folder processor.save_pretrained(lowercase_ ) # drop `processor_class` in feature extractor with open(os.path.join(lowercase_ , lowercase_ ) , """r""" ) as f: lowerCAmelCase : List[str] = json.load(lowercase_ ) config_dict.pop("""processor_class""" ) with open(os.path.join(lowercase_ , lowercase_ ) , """w""" ) as f: f.write(json.dumps(lowercase_ ) ) lowerCAmelCase : List[str] = 
AutoProcessor.from_pretrained(lowercase_ ) self.assertIsInstance(lowercase_ , lowercase_ ) def _snake_case ( self ) -> Dict: with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : List[str] = WavaVecaConfig(processor_class="""Wav2Vec2Processor""" ) model_config.save_pretrained(lowercase_ ) # copy relevant files copyfile(lowercase_ , os.path.join(lowercase_ , """vocab.json""" ) ) # create emtpy sample processor with open(os.path.join(lowercase_ , lowercase_ ) , """w""" ) as f: f.write("""{}""" ) lowerCAmelCase : int = AutoProcessor.from_pretrained(lowercase_ ) self.assertIsInstance(lowercase_ , lowercase_ ) def _snake_case ( self ) -> Dict: # If remote code is not set, we will time out when asking whether to load the model. with self.assertRaises(lowercase_ ): lowerCAmelCase : Tuple = AutoProcessor.from_pretrained("""hf-internal-testing/test_dynamic_processor""" ) # If remote code is disabled, we can't load this config. with self.assertRaises(lowercase_ ): lowerCAmelCase : Optional[int] = AutoProcessor.from_pretrained( """hf-internal-testing/test_dynamic_processor""" , trust_remote_code=lowercase_ ) lowerCAmelCase : Tuple = AutoProcessor.from_pretrained("""hf-internal-testing/test_dynamic_processor""" , trust_remote_code=lowercase_ ) self.assertTrue(processor.special_attribute_present ) self.assertEqual(processor.__class__.__name__ , """NewProcessor""" ) lowerCAmelCase : List[str] = processor.feature_extractor self.assertTrue(feature_extractor.special_attribute_present ) self.assertEqual(feature_extractor.__class__.__name__ , """NewFeatureExtractor""" ) lowerCAmelCase : Union[str, Any] = processor.tokenizer self.assertTrue(tokenizer.special_attribute_present ) if is_tokenizers_available(): self.assertEqual(tokenizer.__class__.__name__ , """NewTokenizerFast""" ) # Test we can also load the slow version lowerCAmelCase : List[Any] = AutoProcessor.from_pretrained( """hf-internal-testing/test_dynamic_processor""" , trust_remote_code=lowercase_ , 
use_fast=lowercase_ ) lowerCAmelCase : List[Any] = new_processor.tokenizer self.assertTrue(new_tokenizer.special_attribute_present ) self.assertEqual(new_tokenizer.__class__.__name__ , """NewTokenizer""" ) else: self.assertEqual(tokenizer.__class__.__name__ , """NewTokenizer""" ) def _snake_case ( self ) -> str: try: AutoConfig.register("""custom""" , lowercase_ ) AutoFeatureExtractor.register(lowercase_ , lowercase_ ) AutoTokenizer.register(lowercase_ , slow_tokenizer_class=lowercase_ ) AutoProcessor.register(lowercase_ , lowercase_ ) # Trying to register something existing in the Transformers library will raise an error with self.assertRaises(lowercase_ ): AutoProcessor.register(lowercase_ , lowercase_ ) # Now that the config is registered, it can be used as any other config with the auto-API lowerCAmelCase : Dict = CustomFeatureExtractor.from_pretrained(lowercase_ ) with tempfile.TemporaryDirectory() as tmp_dir: lowerCAmelCase : List[Any] = os.path.join(lowercase_ , """vocab.txt""" ) with open(lowercase_ , """w""" , encoding="""utf-8""" ) as vocab_writer: vocab_writer.write("""""".join([x + """\n""" for x in self.vocab_tokens] ) ) lowerCAmelCase : Any = CustomTokenizer(lowercase_ ) lowerCAmelCase : Tuple = CustomProcessor(lowercase_ , lowercase_ ) with tempfile.TemporaryDirectory() as tmp_dir: processor.save_pretrained(lowercase_ ) lowerCAmelCase : Optional[Any] = AutoProcessor.from_pretrained(lowercase_ ) self.assertIsInstance(lowercase_ , lowercase_ ) finally: if "custom" in CONFIG_MAPPING._extra_content: del CONFIG_MAPPING._extra_content["custom"] if CustomConfig in FEATURE_EXTRACTOR_MAPPING._extra_content: del FEATURE_EXTRACTOR_MAPPING._extra_content[CustomConfig] if CustomConfig in TOKENIZER_MAPPING._extra_content: del TOKENIZER_MAPPING._extra_content[CustomConfig] if CustomConfig in PROCESSOR_MAPPING._extra_content: del PROCESSOR_MAPPING._extra_content[CustomConfig] def _snake_case ( self ) -> Dict: class _a ( snake_case_ ): _UpperCamelCase: Any = False 
class _a ( snake_case_ ): _UpperCamelCase: Any = False class _a ( snake_case_ ): _UpperCamelCase: Any = "AutoFeatureExtractor" _UpperCamelCase: Optional[Any] = "AutoTokenizer" _UpperCamelCase: List[Any] = False try: AutoConfig.register("""custom""" , lowercase_ ) AutoFeatureExtractor.register(lowercase_ , lowercase_ ) AutoTokenizer.register(lowercase_ , slow_tokenizer_class=lowercase_ ) AutoProcessor.register(lowercase_ , lowercase_ ) # If remote code is not set, the default is to use local classes. lowerCAmelCase : List[str] = AutoProcessor.from_pretrained("""hf-internal-testing/test_dynamic_processor""" ) self.assertEqual(processor.__class__.__name__ , """NewProcessor""" ) self.assertFalse(processor.special_attribute_present ) self.assertFalse(processor.feature_extractor.special_attribute_present ) self.assertFalse(processor.tokenizer.special_attribute_present ) # If remote code is disabled, we load the local ones. lowerCAmelCase : Optional[int] = AutoProcessor.from_pretrained( """hf-internal-testing/test_dynamic_processor""" , trust_remote_code=lowercase_ ) self.assertEqual(processor.__class__.__name__ , """NewProcessor""" ) self.assertFalse(processor.special_attribute_present ) self.assertFalse(processor.feature_extractor.special_attribute_present ) self.assertFalse(processor.tokenizer.special_attribute_present ) # If remote is enabled, we load from the Hub. 
lowerCAmelCase : List[str] = AutoProcessor.from_pretrained( """hf-internal-testing/test_dynamic_processor""" , trust_remote_code=lowercase_ ) self.assertEqual(processor.__class__.__name__ , """NewProcessor""" ) self.assertTrue(processor.special_attribute_present ) self.assertTrue(processor.feature_extractor.special_attribute_present ) self.assertTrue(processor.tokenizer.special_attribute_present ) finally: if "custom" in CONFIG_MAPPING._extra_content: del CONFIG_MAPPING._extra_content["custom"] if CustomConfig in FEATURE_EXTRACTOR_MAPPING._extra_content: del FEATURE_EXTRACTOR_MAPPING._extra_content[CustomConfig] if CustomConfig in TOKENIZER_MAPPING._extra_content: del TOKENIZER_MAPPING._extra_content[CustomConfig] if CustomConfig in PROCESSOR_MAPPING._extra_content: del PROCESSOR_MAPPING._extra_content[CustomConfig] def _snake_case ( self ) -> Dict: lowerCAmelCase : int = AutoProcessor.from_pretrained("""hf-internal-testing/tiny-random-bert""" ) self.assertEqual(processor.__class__.__name__ , """BertTokenizerFast""" ) def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase : Tuple = AutoProcessor.from_pretrained("""hf-internal-testing/tiny-random-convnext""" ) self.assertEqual(processor.__class__.__name__ , """ConvNextImageProcessor""" ) @is_staging_test class _a ( unittest.TestCase ): _UpperCamelCase: Any = ["[UNK]", "[CLS]", "[SEP]", "[PAD]", "[MASK]", "bla", "blou"] @classmethod def _snake_case ( cls ) -> Tuple: lowerCAmelCase : Optional[int] = TOKEN HfFolder.save_token(lowercase_ ) @classmethod def _snake_case ( cls ) -> Union[str, Any]: try: delete_repo(token=cls._token , repo_id="""test-processor""" ) except HTTPError: pass try: delete_repo(token=cls._token , repo_id="""valid_org/test-processor-org""" ) except HTTPError: pass try: delete_repo(token=cls._token , repo_id="""test-dynamic-processor""" ) except HTTPError: pass def _snake_case ( self ) -> List[str]: lowerCAmelCase : Dict = WavaVecaProcessor.from_pretrained(lowercase_ ) with 
tempfile.TemporaryDirectory() as tmp_dir: processor.save_pretrained( os.path.join(lowercase_ , """test-processor""" ) , push_to_hub=lowercase_ , use_auth_token=self._token ) lowerCAmelCase : List[Any] = WavaVecaProcessor.from_pretrained(f"""{USER}/test-processor""" ) for k, v in processor.feature_extractor.__dict__.items(): self.assertEqual(lowercase_ , getattr(new_processor.feature_extractor , lowercase_ ) ) self.assertDictEqual(new_processor.tokenizer.get_vocab() , processor.tokenizer.get_vocab() ) def _snake_case ( self ) -> int: lowerCAmelCase : Optional[int] = WavaVecaProcessor.from_pretrained(lowercase_ ) with tempfile.TemporaryDirectory() as tmp_dir: processor.save_pretrained( os.path.join(lowercase_ , """test-processor-org""" ) , push_to_hub=lowercase_ , use_auth_token=self._token , organization="""valid_org""" , ) lowerCAmelCase : str = WavaVecaProcessor.from_pretrained("""valid_org/test-processor-org""" ) for k, v in processor.feature_extractor.__dict__.items(): self.assertEqual(lowercase_ , getattr(new_processor.feature_extractor , lowercase_ ) ) self.assertDictEqual(new_processor.tokenizer.get_vocab() , processor.tokenizer.get_vocab() ) def _snake_case ( self ) -> str: CustomFeatureExtractor.register_for_auto_class() CustomTokenizer.register_for_auto_class() CustomProcessor.register_for_auto_class() lowerCAmelCase : Union[str, Any] = CustomFeatureExtractor.from_pretrained(lowercase_ ) with tempfile.TemporaryDirectory() as tmp_dir: lowerCAmelCase : Union[str, Any] = os.path.join(lowercase_ , """vocab.txt""" ) with open(lowercase_ , """w""" , encoding="""utf-8""" ) as vocab_writer: vocab_writer.write("""""".join([x + """\n""" for x in self.vocab_tokens] ) ) lowerCAmelCase : Dict = CustomTokenizer(lowercase_ ) lowerCAmelCase : List[Any] = CustomProcessor(lowercase_ , lowercase_ ) with tempfile.TemporaryDirectory() as tmp_dir: create_repo(f"""{USER}/test-dynamic-processor""" , token=self._token ) lowerCAmelCase : int = Repository(lowercase_ , 
clone_from=f"""{USER}/test-dynamic-processor""" , token=self._token ) processor.save_pretrained(lowercase_ ) # This has added the proper auto_map field to the feature extractor config self.assertDictEqual( processor.feature_extractor.auto_map , { """AutoFeatureExtractor""": """custom_feature_extraction.CustomFeatureExtractor""", """AutoProcessor""": """custom_processing.CustomProcessor""", } , ) # This has added the proper auto_map field to the tokenizer config with open(os.path.join(lowercase_ , """tokenizer_config.json""" ) ) as f: lowerCAmelCase : Optional[int] = json.load(lowercase_ ) self.assertDictEqual( tokenizer_config["""auto_map"""] , { """AutoTokenizer""": ["""custom_tokenization.CustomTokenizer""", None], """AutoProcessor""": """custom_processing.CustomProcessor""", } , ) # The code has been copied from fixtures self.assertTrue(os.path.isfile(os.path.join(lowercase_ , """custom_feature_extraction.py""" ) ) ) self.assertTrue(os.path.isfile(os.path.join(lowercase_ , """custom_tokenization.py""" ) ) ) self.assertTrue(os.path.isfile(os.path.join(lowercase_ , """custom_processing.py""" ) ) ) repo.push_to_hub() lowerCAmelCase : int = AutoProcessor.from_pretrained(f"""{USER}/test-dynamic-processor""" , trust_remote_code=lowercase_ ) # Can't make an isinstance check because the new_processor is from the CustomProcessor class of a dynamic module self.assertEqual(new_processor.__class__.__name__ , """CustomProcessor""" )
693
# Imports import numpy as np class _a : def __init__( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> List[Any]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) def _snake_case ( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Union[str, Any]: if red is not None: lowerCAmelCase : str = red if green is not None: lowerCAmelCase : Optional[int] = green if blue is not None: lowerCAmelCase : Optional[int] = blue if red_edge is not None: lowerCAmelCase : Tuple = red_edge if nir is not None: lowerCAmelCase : Union[str, Any] = nir return True def _snake_case ( self , lowercase_="" , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Optional[int]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) lowerCAmelCase : int = { """ARVI2""": self.arvaa, """CCCI""": self.ccci, """CVI""": self.cvi, """GLI""": self.gli, """NDVI""": self.ndvi, """BNDVI""": self.bndvi, """redEdgeNDVI""": self.red_edge_ndvi, """GNDVI""": self.gndvi, """GBNDVI""": self.gbndvi, """GRNDVI""": self.grndvi, """RBNDVI""": self.rbndvi, """PNDVI""": self.pndvi, """ATSAVI""": self.atsavi, """BWDRVI""": self.bwdrvi, """CIgreen""": self.ci_green, """CIrededge""": self.ci_rededge, """CI""": self.ci, """CTVI""": self.ctvi, """GDVI""": self.gdvi, """EVI""": self.evi, """GEMI""": self.gemi, """GOSAVI""": self.gosavi, """GSAVI""": self.gsavi, """Hue""": self.hue, """IVI""": self.ivi, """IPVI""": self.ipvi, """I""": self.i, """RVI""": self.rvi, """MRVI""": self.mrvi, """MSAVI""": self.m_savi, """NormG""": self.norm_g, """NormNIR""": self.norm_nir, """NormR""": self.norm_r, """NGRDI""": self.ngrdi, """RI""": self.ri, """S""": self.s, """IF""": self._if, """DVI""": self.dvi, """TVI""": self.tvi, """NDRE""": self.ndre, } try: return funcs[index]() except 
KeyError: print("""Index not in the list!""" ) return False def _snake_case ( self ) -> Dict: return -0.1_8 + (1.1_7 * ((self.nir - self.red) / (self.nir + self.red))) def _snake_case ( self ) -> Optional[Any]: return ((self.nir - self.redEdge) / (self.nir + self.redEdge)) / ( (self.nir - self.red) / (self.nir + self.red) ) def _snake_case ( self ) -> List[str]: return self.nir * (self.red / (self.green**2)) def _snake_case ( self ) -> Tuple: return (2 * self.green - self.red - self.blue) / ( 2 * self.green + self.red + self.blue ) def _snake_case ( self ) -> Optional[int]: return (self.nir - self.red) / (self.nir + self.red) def _snake_case ( self ) -> List[str]: return (self.nir - self.blue) / (self.nir + self.blue) def _snake_case ( self ) -> int: return (self.redEdge - self.red) / (self.redEdge + self.red) def _snake_case ( self ) -> Optional[Any]: return (self.nir - self.green) / (self.nir + self.green) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.blue)) / ( self.nir + (self.green + self.blue) ) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.red)) / ( self.nir + (self.green + self.red) ) def _snake_case ( self ) -> int: return (self.nir - (self.blue + self.red)) / (self.nir + (self.blue + self.red)) def _snake_case ( self ) -> List[str]: return (self.nir - (self.green + self.red + self.blue)) / ( self.nir + (self.green + self.red + self.blue) ) def _snake_case ( self , lowercase_=0.0_8 , lowercase_=1.2_2 , lowercase_=0.0_3 ) -> int: return a * ( (self.nir - a * self.red - b) / (a * self.nir + self.red - a * b + x * (1 + a**2)) ) def _snake_case ( self ) -> Optional[Any]: return (0.1 * self.nir - self.blue) / (0.1 * self.nir + self.blue) def _snake_case ( self ) -> Any: return (self.nir / self.green) - 1 def _snake_case ( self ) -> List[Any]: return (self.nir / self.redEdge) - 1 def _snake_case ( self ) -> str: return (self.red - self.blue) / self.red def _snake_case ( self ) -> Optional[int]: lowerCAmelCase 
: Dict = self.ndvi() return ((ndvi + 0.5) / (abs(ndvi + 0.5 ))) * (abs(ndvi + 0.5 ) ** (1 / 2)) def _snake_case ( self ) -> Optional[Any]: return self.nir - self.green def _snake_case ( self ) -> int: return 2.5 * ( (self.nir - self.red) / (self.nir + 6 * self.red - 7.5 * self.blue + 1) ) def _snake_case ( self ) -> Optional[Any]: lowerCAmelCase : Tuple = (2 * (self.nir**2 - self.red**2) + 1.5 * self.nir + 0.5 * self.red) / ( self.nir + self.red + 0.5 ) return n * (1 - 0.2_5 * n) - (self.red - 0.1_2_5) / (1 - self.red) def _snake_case ( self , lowercase_=0.1_6 ) -> Optional[int]: return (self.nir - self.green) / (self.nir + self.green + y) def _snake_case ( self , lowercase_=0.5 ) -> List[str]: return ((self.nir - self.green) / (self.nir + self.green + n)) * (1 + n) def _snake_case ( self ) -> Any: return np.arctan( ((2 * self.red - self.green - self.blue) / 3_0.5) * (self.green - self.blue) ) def _snake_case ( self , lowercase_=None , lowercase_=None ) -> List[Any]: return (self.nir - b) / (a * self.red) def _snake_case ( self ) -> Any: return (self.nir / ((self.nir + self.red) / 2)) * (self.ndvi() + 1) def _snake_case ( self ) -> str: return (self.red + self.green + self.blue) / 3_0.5 def _snake_case ( self ) -> Union[str, Any]: return self.nir / self.red def _snake_case ( self ) -> Tuple: return (self.rvi() - 1) / (self.rvi() + 1) def _snake_case ( self ) -> Dict: return ( (2 * self.nir + 1) - ((2 * self.nir + 1) ** 2 - 8 * (self.nir - self.red)) ** (1 / 2) ) / 2 def _snake_case ( self ) -> List[Any]: return self.green / (self.nir + self.red + self.green) def _snake_case ( self ) -> int: return self.nir / (self.nir + self.red + self.green) def _snake_case ( self ) -> Dict: return self.red / (self.nir + self.red + self.green) def _snake_case ( self ) -> List[Any]: return (self.green - self.red) / (self.green + self.red) def _snake_case ( self ) -> Optional[int]: return (self.red - self.green) / (self.red + self.green) def _snake_case ( self ) -> Tuple: 
lowerCAmelCase : Any = np.max([np.max(self.red ), np.max(self.green ), np.max(self.blue )] ) lowerCAmelCase : Dict = np.min([np.min(self.red ), np.min(self.green ), np.min(self.blue )] ) return (max_value - min_value) / max_value def _snake_case ( self ) -> int: return (2 * self.red - self.green - self.blue) / (self.green - self.blue) def _snake_case ( self ) -> List[str]: return self.nir / self.red def _snake_case ( self ) -> int: return (self.ndvi() + 0.5) ** (1 / 2) def _snake_case ( self ) -> str: return (self.nir - self.redEdge) / (self.nir + self.redEdge)
693
1
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' return numa ^ numa < 0 if __name__ == "__main__": import doctest doctest.testmod()
693
# NOTE(review): machine-obfuscated copy of a CI error-statistics script (it
# downloads GitHub Actions artifacts for a transformers workflow run and
# aggregates test failures).  Every binding was renamed to `lowerCAmelCase`
# and most arguments to `SCREAMING_SNAKE_CASE__`, so names read below
# (`token`, `workflow_run_id`, `url`, `result`, `job_links`, the sort-key
# lambdas' `item`, the misspelled `worflow_run_id`, ...) are unbound.
# Docstrings describe apparent intent only; the code is not runnable as-is.
import argparse
import json
import math
import os
import time
import traceback
import zipfile
from collections import Counter

import requests


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):
    """Apparent intent: map job name -> html_url for every job of a workflow
    run, paging through the GitHub API 100 jobs at a time; {} on error."""
    lowerCAmelCase : List[str] = None
    if token is not None:
        lowerCAmelCase : Union[str, Any] = {"""Accept""": """application/vnd.github+json""", """Authorization""": F"""Bearer {token}"""}
    lowerCAmelCase : Optional[Any] = F"""https://api.github.com/repos/huggingface/transformers/actions/runs/{workflow_run_id}/jobs?per_page=100"""
    lowerCAmelCase : Any = requests.get(SCREAMING_SNAKE_CASE__ ,headers=SCREAMING_SNAKE_CASE__ ).json()
    lowerCAmelCase : List[str] = {}
    try:
        job_links.update({job["""name"""]: job["""html_url"""] for job in result["""jobs"""]} )
        # Remaining pages beyond the first 100 results.
        lowerCAmelCase : int = math.ceil((result["""total_count"""] - 1_0_0) / 1_0_0 )
        for i in range(SCREAMING_SNAKE_CASE__ ):
            lowerCAmelCase : List[str] = requests.get(url + F"""&page={i + 2}""" ,headers=SCREAMING_SNAKE_CASE__ ).json()
            job_links.update({job["""name"""]: job["""html_url"""] for job in result["""jobs"""]} )
        return job_links
    except Exception:
        print(F"""Unknown error, could not fetch links:\n{traceback.format_exc()}""" )
    return {}


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):
    """Apparent intent: map artifact name -> archive_download_url for a
    workflow run, paged like the job listing above; {} on error.
    NOTE(review): the URL reads `worflow_run_id` (typo), which is unbound."""
    lowerCAmelCase : Union[str, Any] = None
    if token is not None:
        lowerCAmelCase : str = {"""Accept""": """application/vnd.github+json""", """Authorization""": F"""Bearer {token}"""}
    lowerCAmelCase : Optional[int] = F"""https://api.github.com/repos/huggingface/transformers/actions/runs/{worflow_run_id}/artifacts?per_page=100"""
    lowerCAmelCase : Optional[int] = requests.get(SCREAMING_SNAKE_CASE__ ,headers=SCREAMING_SNAKE_CASE__ ).json()
    lowerCAmelCase : List[str] = {}
    try:
        artifacts.update({artifact["""name"""]: artifact["""archive_download_url"""] for artifact in result["""artifacts"""]} )
        lowerCAmelCase : Optional[int] = math.ceil((result["""total_count"""] - 1_0_0) / 1_0_0 )
        for i in range(SCREAMING_SNAKE_CASE__ ):
            lowerCAmelCase : int = requests.get(url + F"""&page={i + 2}""" ,headers=SCREAMING_SNAKE_CASE__ ).json()
            artifacts.update({artifact["""name"""]: artifact["""archive_download_url"""] for artifact in result["""artifacts"""]} )
        return artifacts
    except Exception:
        print(F"""Unknown error, could not fetch links:\n{traceback.format_exc()}""" )
    return {}


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ):
    """Apparent intent: follow the artifact download redirect and write the
    zip to `<output_dir>/<artifact_name>.zip`."""
    lowerCAmelCase : Dict = None
    if token is not None:
        lowerCAmelCase : Optional[Any] = {"""Accept""": """application/vnd.github+json""", """Authorization""": F"""Bearer {token}"""}
    # GitHub answers the download URL with a redirect to the real location.
    lowerCAmelCase : str = requests.get(SCREAMING_SNAKE_CASE__ ,headers=SCREAMING_SNAKE_CASE__ ,allow_redirects=SCREAMING_SNAKE_CASE__ )
    lowerCAmelCase : Union[str, Any] = result.headers["""Location"""]
    lowerCAmelCase : Optional[int] = requests.get(SCREAMING_SNAKE_CASE__ ,allow_redirects=SCREAMING_SNAKE_CASE__ )
    lowerCAmelCase : Union[str, Any] = os.path.join(SCREAMING_SNAKE_CASE__ ,F"""{artifact_name}.zip""" )
    with open(SCREAMING_SNAKE_CASE__ ,"""wb""" ) as fp:
        fp.write(response.content )


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):
    """Apparent intent: parse one report zip into a list of
    [error_line, error, failed_test, job_link] entries."""
    lowerCAmelCase : int = []
    lowerCAmelCase : Optional[int] = []
    lowerCAmelCase : Optional[int] = None
    with zipfile.ZipFile(SCREAMING_SNAKE_CASE__ ) as z:
        for filename in z.namelist():
            if not os.path.isdir(SCREAMING_SNAKE_CASE__ ):
                # read the file
                if filename in ["failures_line.txt", "summary_short.txt", "job_name.txt"]:
                    with z.open(SCREAMING_SNAKE_CASE__ ) as f:
                        for line in f:
                            lowerCAmelCase : Optional[Any] = line.decode("""UTF-8""" ).strip()
                            if filename == "failures_line.txt":
                                try:
                                    # `error_line` is the place where `error` occurs
                                    lowerCAmelCase : str = line[: line.index(""": """ )]
                                    lowerCAmelCase : Optional[int] = line[line.index(""": """ ) + len(""": """ ) :]
                                    errors.append([error_line, error] )
                                except Exception:
                                    # skip un-related lines
                                    pass
                            elif filename == "summary_short.txt" and line.startswith("""FAILED """ ):
                                # `test` is the test method that failed
                                lowerCAmelCase : Union[str, Any] = line[len("""FAILED """ ) :]
                                failed_tests.append(SCREAMING_SNAKE_CASE__ )
                            elif filename == "job_name.txt":
                                lowerCAmelCase : Union[str, Any] = line
    if len(SCREAMING_SNAKE_CASE__ ) != len(SCREAMING_SNAKE_CASE__ ):
        raise ValueError(
            F"""`errors` and `failed_tests` should have the same number of elements. Got {len(SCREAMING_SNAKE_CASE__ )} for `errors` """
            F"""and {len(SCREAMING_SNAKE_CASE__ )} for `failed_tests` instead. The test reports in {artifact_zip_path} have some"""
            """ problem.""" )
    lowerCAmelCase : Optional[int] = None
    if job_name and job_links:
        lowerCAmelCase : Optional[int] = job_links.get(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )
    # A list with elements of the form (line of error, error, failed test)
    lowerCAmelCase : Union[str, Any] = [x + [y] + [job_link] for x, y in zip(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )]
    return result


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):
    """Apparent intent: parse every *.zip report in a directory and
    concatenate the per-artifact error lists."""
    lowerCAmelCase : str = []
    lowerCAmelCase : Union[str, Any] = [os.path.join(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ) for p in os.listdir(SCREAMING_SNAKE_CASE__ ) if p.endswith(""".zip""" )]
    for p in paths:
        errors.extend(get_errors_from_single_artifact(SCREAMING_SNAKE_CASE__ ,job_links=SCREAMING_SNAKE_CASE__ ) )
    return errors


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):
    """Apparent intent: group the error log by error message, counting
    occurrences and collecting the failing tests, most frequent first.
    NOTE(review): the sort-key lambda ignores its parameter and reads the
    unbound name `item` — lost in the obfuscation."""
    lowerCAmelCase : int = Counter()
    counter.update([x[1] for x in logs] )
    lowerCAmelCase : List[str] = counter.most_common()
    lowerCAmelCase : Union[str, Any] = {}
    for error, count in counts:
        if error_filter is None or error not in error_filter:
            lowerCAmelCase : List[Any] = {"""count""": count, """failed_tests""": [(x[2], x[0]) for x in logs if x[1] == error]}
    lowerCAmelCase : int = dict(sorted(r.items() ,key=lambda SCREAMING_SNAKE_CASE__ : item[1]["count"] ,reverse=SCREAMING_SNAKE_CASE__ ) )
    return r


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    """Apparent intent: extract the model directory name from a test id like
    `tests/models/<model>/test_x.py::TestCls::test_m`, else None.
    NOTE(review): returns `test` where the original presumably returned the
    extracted model name."""
    lowerCAmelCase : Optional[Any] = test.split("""::""" )[0]
    if test.startswith("""tests/models/""" ):
        lowerCAmelCase : str = test.split("""/""" )[2]
    else:
        lowerCAmelCase : List[Any] = None
    return test


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ):
    """Apparent intent: group errors per model, keeping per-model error
    counts, sorted by total count descending (same broken `item` lambda as
    above)."""
    lowerCAmelCase : List[Any] = [(x[0], x[1], get_model(x[2] )) for x in logs]
    lowerCAmelCase : int = [x for x in logs if x[2] is not None]
    lowerCAmelCase : Optional[Any] = {x[2] for x in logs}
    lowerCAmelCase : Dict = {}
    for test in tests:
        lowerCAmelCase : Optional[int] = Counter()
        # count by errors in `test`
        counter.update([x[1] for x in logs if x[2] == test] )
        lowerCAmelCase : Tuple = counter.most_common()
        lowerCAmelCase : Union[str, Any] = {error: count for error, count in counts if (error_filter is None or error not in error_filter)}
        lowerCAmelCase : List[Any] = sum(error_counts.values() )
        if n_errors > 0:
            lowerCAmelCase : Optional[int] = {"""count""": n_errors, """errors""": error_counts}
    lowerCAmelCase : Any = dict(sorted(r.items() ,key=lambda SCREAMING_SNAKE_CASE__ : item[1]["count"] ,reverse=SCREAMING_SNAKE_CASE__ ) )
    return r


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    """Apparent intent: render the per-error aggregation as a GitHub-flavored
    markdown table."""
    lowerCAmelCase : Union[str, Any] = """| no. | error | status |"""
    lowerCAmelCase : List[Any] = """|-:|:-|:-|"""
    lowerCAmelCase : Union[str, Any] = [header, sep]
    for error in reduced_by_error:
        lowerCAmelCase : List[str] = reduced_by_error[error]["""count"""]
        lowerCAmelCase : Any = F"""| {count} | {error[:1_0_0]} | |"""
        lines.append(SCREAMING_SNAKE_CASE__ )
    return "\n".join(SCREAMING_SNAKE_CASE__ )


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    """Apparent intent: render the per-model aggregation (model, total errors,
    most frequent error and its count) as a markdown table."""
    lowerCAmelCase : str = """| model | no. of errors | major error | count |"""
    lowerCAmelCase : Any = """|-:|-:|-:|-:|"""
    lowerCAmelCase : str = [header, sep]
    for model in reduced_by_model:
        lowerCAmelCase : Any = reduced_by_model[model]["""count"""]
        lowerCAmelCase , lowerCAmelCase : Optional[int] = list(reduced_by_model[model]["""errors"""].items() )[0]
        lowerCAmelCase : Optional[Any] = F"""| {model} | {count} | {error[:6_0]} | {_count} |"""
        lines.append(SCREAMING_SNAKE_CASE__ )
    return "\n".join(SCREAMING_SNAKE_CASE__ )


if __name__ == "__main__":
    # CLI driver: fetch job links and artifacts for a workflow run, download
    # and parse every report zip, then write JSON and markdown summaries.
    lowerCAmelCase : int =argparse.ArgumentParser()
    # Required parameters
    parser.add_argument('--workflow_run_id', type=str, required=True, help='A GitHub Actions workflow run id.')
    parser.add_argument(
        '--output_dir',
        type=str,
        required=True,
        help='Where to store the downloaded artifacts and other result files.',
    )
    parser.add_argument('--token', default=None, type=str, help='A token that has actions:read permission.')
    lowerCAmelCase : Dict =parser.parse_args()
    os.makedirs(args.output_dir, exist_ok=True)
    lowerCAmelCase : Optional[int] =get_job_links(args.workflow_run_id, token=args.token)
    lowerCAmelCase : List[Any] ={}
    # To deal with `workflow_call` event, where a job name is the combination of the job names in the caller and callee.
    # For example, `PyTorch 1.11 / Model tests (models/albert, single-gpu)`.
    if _job_links:
        for k, v in _job_links.items():
            # This is how GitHub actions combine job names.
            if " / " in k:
                lowerCAmelCase : str =k.find(' / ')
                lowerCAmelCase : Any =k[index + len(' / ') :]
            lowerCAmelCase : str =v
    with open(os.path.join(args.output_dir, 'job_links.json'), 'w', encoding='UTF-8') as fp:
        json.dump(job_links, fp, ensure_ascii=False, indent=4)
    lowerCAmelCase : Any =get_artifacts_links(args.workflow_run_id, token=args.token)
    with open(os.path.join(args.output_dir, 'artifacts.json'), 'w', encoding='UTF-8') as fp:
        json.dump(artifacts, fp, ensure_ascii=False, indent=4)
    for idx, (name, url) in enumerate(artifacts.items()):
        download_artifact(name, url, args.output_dir, args.token)
        # Be gentle to GitHub
        time.sleep(1)
    lowerCAmelCase : List[Any] =get_all_errors(args.output_dir, job_links=job_links)
    # `e[1]` is the error
    lowerCAmelCase : str =Counter()
    counter.update([e[1] for e in errors])
    # print the top 30 most common test errors
    lowerCAmelCase : int =counter.most_common(30)
    for item in most_common:
        print(item)
    with open(os.path.join(args.output_dir, 'errors.json'), 'w', encoding='UTF-8') as fp:
        json.dump(errors, fp, ensure_ascii=False, indent=4)
    lowerCAmelCase : Optional[int] =reduce_by_error(errors)
    lowerCAmelCase : Tuple =reduce_by_model(errors)
    lowerCAmelCase : Optional[Any] =make_github_table(reduced_by_error)
    lowerCAmelCase : Union[str, Any] =make_github_table_per_model(reduced_by_model)
    with open(os.path.join(args.output_dir, 'reduced_by_error.txt'), 'w', encoding='UTF-8') as fp:
        fp.write(sa)
    with open(os.path.join(args.output_dir, 'reduced_by_model.txt'), 'w', encoding='UTF-8') as fp:
        fp.write(sa)
693
1
# NOTE(review): machine-obfuscated copy of the diffusers -> stable-diffusion
# checkpoint conversion script.  Every binding was renamed to `lowerCAmelCase`
# and most call arguments to `SCREAMING_SNAKE_CASE__`, so names read below
# (the conversion-map lists, `mapping`, `protected`, the `m` inside the regex
# lambdas, ...) are frequently unbound.  Comments describe apparent intent
# only.  The `if __name__` section at the bottom continues past this chunk.
import argparse
import os.path as osp
import re

import torch
from safetensors.torch import load_file, save_file

# =================#
# UNet Conversion #
# =================#
# Direct key renames between SD and HF Diffusers UNet checkpoints.
lowerCAmelCase : str =[
    # (stable-diffusion, HF Diffusers)
    ('time_embed.0.weight', 'time_embedding.linear_1.weight'),
    ('time_embed.0.bias', 'time_embedding.linear_1.bias'),
    ('time_embed.2.weight', 'time_embedding.linear_2.weight'),
    ('time_embed.2.bias', 'time_embedding.linear_2.bias'),
    ('input_blocks.0.0.weight', 'conv_in.weight'),
    ('input_blocks.0.0.bias', 'conv_in.bias'),
    ('out.0.weight', 'conv_norm_out.weight'),
    ('out.0.bias', 'conv_norm_out.bias'),
    ('out.2.weight', 'conv_out.weight'),
    ('out.2.bias', 'conv_out.bias'),
]
# Substring renames applied inside resnet block keys.
lowerCAmelCase : str =[
    # (stable-diffusion, HF Diffusers)
    ('in_layers.0', 'norm1'),
    ('in_layers.2', 'conv1'),
    ('out_layers.0', 'norm2'),
    ('out_layers.3', 'conv2'),
    ('emb_layers.1', 'time_emb_proj'),
    ('skip_connection', 'conv_shortcut'),
]
lowerCAmelCase : List[str] =[]
# hardcoded number of downblocks and resnets/attentions...
# would need smarter logic for other networks.
for i in range(4):
    # loop over downblocks/upblocks
    for j in range(2):
        # loop over resnets/attentions for downblocks
        lowerCAmelCase : Optional[Any] =F'''down_blocks.{i}.resnets.{j}.'''
        lowerCAmelCase : Dict =F'''input_blocks.{3*i + j + 1}.0.'''
        unet_conversion_map_layer.append((sd_down_res_prefix, hf_down_res_prefix))
        if i < 3:
            # no attention layers in down_blocks.3
            lowerCAmelCase : List[str] =F'''down_blocks.{i}.attentions.{j}.'''
            lowerCAmelCase : Any =F'''input_blocks.{3*i + j + 1}.1.'''
            unet_conversion_map_layer.append((sd_down_atn_prefix, hf_down_atn_prefix))
    for j in range(3):
        # loop over resnets/attentions for upblocks
        lowerCAmelCase : Dict =F'''up_blocks.{i}.resnets.{j}.'''
        lowerCAmelCase : List[Any] =F'''output_blocks.{3*i + j}.0.'''
        unet_conversion_map_layer.append((sd_up_res_prefix, hf_up_res_prefix))
        if i > 0:
            # no attention layers in up_blocks.0
            lowerCAmelCase : int =F'''up_blocks.{i}.attentions.{j}.'''
            lowerCAmelCase : int =F'''output_blocks.{3*i + j}.1.'''
            unet_conversion_map_layer.append((sd_up_atn_prefix, hf_up_atn_prefix))
    if i < 3:
        # no downsample in down_blocks.3
        lowerCAmelCase : int =F'''down_blocks.{i}.downsamplers.0.conv.'''
        lowerCAmelCase : Tuple =F'''input_blocks.{3*(i+1)}.0.op.'''
        unet_conversion_map_layer.append((sd_downsample_prefix, hf_downsample_prefix))
        # no upsample in up_blocks.3
        lowerCAmelCase : int =F'''up_blocks.{i}.upsamplers.0.'''
        lowerCAmelCase : Dict =F'''output_blocks.{3*i + 2}.{1 if i == 0 else 2}.'''
        unet_conversion_map_layer.append((sd_upsample_prefix, hf_upsample_prefix))
lowerCAmelCase : Dict ='mid_block.attentions.0.'
lowerCAmelCase : Optional[Any] ='middle_block.1.'
unet_conversion_map_layer.append((sd_mid_atn_prefix, hf_mid_atn_prefix))
for j in range(2):
    lowerCAmelCase : List[Any] =F'''mid_block.resnets.{j}.'''
    lowerCAmelCase : List[str] =F'''middle_block.{2*j}.'''
    unet_conversion_map_layer.append((sd_mid_res_prefix, hf_mid_res_prefix))


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    """Apparent intent: rename the HF Diffusers UNet state-dict keys to the
    stable-diffusion layout using the three conversion maps above, then
    return a dict keyed by the SD names."""
    lowerCAmelCase : Union[str, Any] = {k: k for k in unet_state_dict.keys()}
    for sd_name, hf_name in unet_conversion_map:
        lowerCAmelCase : Union[str, Any] = sd_name
    for k, v in mapping.items():
        if "resnets" in k:
            for sd_part, hf_part in unet_conversion_map_resnet:
                lowerCAmelCase : Optional[int] = v.replace(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )
            lowerCAmelCase : str = v
    for k, v in mapping.items():
        for sd_part, hf_part in unet_conversion_map_layer:
            lowerCAmelCase : Any = v.replace(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )
        lowerCAmelCase : Optional[Any] = v
    lowerCAmelCase : Any = {v: unet_state_dict[k] for k, v in mapping.items()}
    return new_state_dict


# ================#
# VAE Conversion #
# ================#
lowerCAmelCase : Any =[
    # (stable-diffusion, HF Diffusers)
    ('nin_shortcut', 'conv_shortcut'),
    ('norm_out', 'conv_norm_out'),
    ('mid.attn_1.', 'mid_block.attentions.0.'),
]
for i in range(4):
    # down_blocks have two resnets
    for j in range(2):
        lowerCAmelCase : List[str] =F'''encoder.down_blocks.{i}.resnets.{j}.'''
        lowerCAmelCase : List[Any] =F'''encoder.down.{i}.block.{j}.'''
        vae_conversion_map.append((sd_down_prefix, hf_down_prefix))
    if i < 3:
        lowerCAmelCase : str =F'''down_blocks.{i}.downsamplers.0.'''
        lowerCAmelCase : Union[str, Any] =F'''down.{i}.downsample.'''
        vae_conversion_map.append((sd_downsample_prefix, hf_downsample_prefix))
        lowerCAmelCase : List[str] =F'''up_blocks.{i}.upsamplers.0.'''
        lowerCAmelCase : Optional[int] =F'''up.{3-i}.upsample.'''
        vae_conversion_map.append((sd_upsample_prefix, hf_upsample_prefix))
    # up_blocks have three resnets
    # also, up blocks in hf are numbered in reverse from sd
    for j in range(3):
        lowerCAmelCase : List[str] =F'''decoder.up_blocks.{i}.resnets.{j}.'''
        lowerCAmelCase : Dict =F'''decoder.up.{3-i}.block.{j}.'''
        vae_conversion_map.append((sd_up_prefix, hf_up_prefix))
# this part accounts for mid blocks in both the encoder and the decoder
for i in range(2):
    lowerCAmelCase : Optional[Any] =F'''mid_block.resnets.{i}.'''
    lowerCAmelCase : Union[str, Any] =F'''mid.block_{i+1}.'''
    vae_conversion_map.append((sd_mid_res_prefix, hf_mid_res_prefix))
# Extra renames applied only inside VAE attention keys.
lowerCAmelCase : Dict =[
    # (stable-diffusion, HF Diffusers)
    ('norm.', 'group_norm.'),
    ('q.', 'query.'),
    ('k.', 'key.'),
    ('v.', 'value.'),
    ('proj_out.', 'proj_attn.'),
]


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    """Apparent intent: SD VAE attention weights are conv-shaped; append two
    trailing singleton dims to a linear weight."""
    return w.reshape(*w.shape ,1 ,1 )


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    """Apparent intent: rename HF Diffusers VAE keys to SD layout and reshape
    the mid-block attention q/k/v/proj_out weights for the SD format."""
    lowerCAmelCase : Union[str, Any] = {k: k for k in vae_state_dict.keys()}
    for k, v in mapping.items():
        for sd_part, hf_part in vae_conversion_map:
            lowerCAmelCase : Tuple = v.replace(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )
        lowerCAmelCase : List[Any] = v
    for k, v in mapping.items():
        if "attentions" in k:
            for sd_part, hf_part in vae_conversion_map_attn:
                lowerCAmelCase : Union[str, Any] = v.replace(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ )
            lowerCAmelCase : Optional[int] = v
    lowerCAmelCase : List[Any] = {v: vae_state_dict[k] for k, v in mapping.items()}
    lowerCAmelCase : Tuple = ["""q""", """k""", """v""", """proj_out"""]
    for k, v in new_state_dict.items():
        for weight_name in weights_to_convert:
            if F"""mid.attn_1.{weight_name}.weight""" in k:
                print(F"""Reshaping {k} for SD format""" )
                lowerCAmelCase : Dict = reshape_weight_for_sd(SCREAMING_SNAKE_CASE__ )
    return new_state_dict


# =========================#
# Text Encoder Conversion #
# =========================#
lowerCAmelCase : List[Any] =[
    # (stable-diffusion, HF Diffusers)
    ('resblocks.', 'text_model.encoder.layers.'),
    ('ln_1', 'layer_norm1'),
    ('ln_2', 'layer_norm2'),
    ('.c_fc.', '.fc1.'),
    ('.c_proj.', '.fc2.'),
    ('.attn', '.self_attn'),
    ('ln_final.', 'transformer.text_model.final_layer_norm.'),
    ('token_embedding.weight', 'transformer.text_model.embeddings.token_embedding.weight'),
    ('positional_embedding', 'transformer.text_model.embeddings.position_embedding.weight'),
]
# Reverse lookup (HF name -> SD name) with keys escaped for regex use.
lowerCAmelCase : str ={re.escape(x[1]): x[0] for x in textenc_conversion_lst}
lowerCAmelCase : str =re.compile('|'.join(protected.keys()))
# Ordering is from https://github.com/pytorch/pytorch/blob/master/test/cpp/api/modules.cpp
lowerCAmelCase : str ={'q': 0, 'k': 1, 'v': 2}


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    """Apparent intent (v2 text encoder): gather the split q/k/v projection
    weights/biases back into fused in_proj tensors and rename the remaining
    keys via the regex substitution above.
    NOTE(review): `k[-len("q_proj.weight")]` yields a single character —
    upstream uses the slice `k[-len("q_proj.weight"):]`; the colon was lost.
    The lambdas also read an unbound `m` instead of their own parameter."""
    lowerCAmelCase : Optional[Any] = {}
    lowerCAmelCase : Union[str, Any] = {}
    lowerCAmelCase : Union[str, Any] = {}
    for k, v in text_enc_dict.items():
        if (
            k.endswith(""".self_attn.q_proj.weight""" )
            or k.endswith(""".self_attn.k_proj.weight""" )
            or k.endswith(""".self_attn.v_proj.weight""" )
        ):
            lowerCAmelCase : Optional[Any] = k[: -len(""".q_proj.weight""" )]
            lowerCAmelCase : str = k[-len("""q_proj.weight""" )]
            if k_pre not in capture_qkv_weight:
                lowerCAmelCase : List[str] = [None, None, None]
            lowerCAmelCase : Optional[Any] = v
            continue
        if (
            k.endswith(""".self_attn.q_proj.bias""" )
            or k.endswith(""".self_attn.k_proj.bias""" )
            or k.endswith(""".self_attn.v_proj.bias""" )
        ):
            lowerCAmelCase : Dict = k[: -len(""".q_proj.bias""" )]
            lowerCAmelCase : int = k[-len("""q_proj.bias""" )]
            if k_pre not in capture_qkv_bias:
                lowerCAmelCase : List[Any] = [None, None, None]
            lowerCAmelCase : Tuple = v
            continue
        lowerCAmelCase : List[Any] = textenc_pattern.sub(lambda SCREAMING_SNAKE_CASE__ : protected[re.escape(m.group(0 ) )] ,SCREAMING_SNAKE_CASE__ )
        lowerCAmelCase : Optional[int] = v
    for k_pre, tensors in capture_qkv_weight.items():
        if None in tensors:
            raise Exception("""CORRUPTED MODEL: one of the q-k-v values for the text encoder was missing""" )
        lowerCAmelCase : int = textenc_pattern.sub(lambda SCREAMING_SNAKE_CASE__ : protected[re.escape(m.group(0 ) )] ,SCREAMING_SNAKE_CASE__ )
        lowerCAmelCase : str = torch.cat(SCREAMING_SNAKE_CASE__ )
    for k_pre, tensors in capture_qkv_bias.items():
        if None in tensors:
            raise Exception("""CORRUPTED MODEL: one of the q-k-v values for the text encoder was missing""" )
        lowerCAmelCase : str = textenc_pattern.sub(lambda SCREAMING_SNAKE_CASE__ : protected[re.escape(m.group(0 ) )] ,SCREAMING_SNAKE_CASE__ )
        lowerCAmelCase : Optional[int] = torch.cat(SCREAMING_SNAKE_CASE__ )
    return new_state_dict


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    """v1 text encoder: keys already match — return the dict unchanged."""
    return text_enc_dict


if __name__ == "__main__":
    # CLI driver; this section continues past the end of this chunk.
    lowerCAmelCase : Union[str, Any] =argparse.ArgumentParser()
    parser.add_argument('--model_path', default=None, type=str, required=True, help='Path to the model to convert.')
    parser.add_argument('--checkpoint_path', default=None, type=str, required=True, help='Path to the output model.')
    parser.add_argument('--half', action='store_true', help='Save weights in half precision.')
    parser.add_argument(
        '--use_safetensors', action='store_true', help='Save weights use safetensors, default is ckpt.'
    )
    lowerCAmelCase : Any =parser.parse_args()
    assert args.model_path is not None, "Must provide a model path!"
    assert args.checkpoint_path is not None, "Must provide a checkpoint path!"
# Path for safetensors lowerCAmelCase : Tuple =osp.join(args.model_path, 'unet', 'diffusion_pytorch_model.safetensors') lowerCAmelCase : Union[str, Any] =osp.join(args.model_path, 'vae', 'diffusion_pytorch_model.safetensors') lowerCAmelCase : Optional[Any] =osp.join(args.model_path, 'text_encoder', 'model.safetensors') # Load models from safetensors if it exists, if it doesn't pytorch if osp.exists(unet_path): lowerCAmelCase : List[str] =load_file(unet_path, device='cpu') else: lowerCAmelCase : List[Any] =osp.join(args.model_path, 'unet', 'diffusion_pytorch_model.bin') lowerCAmelCase : Optional[Any] =torch.load(unet_path, map_location='cpu') if osp.exists(vae_path): lowerCAmelCase : Dict =load_file(vae_path, device='cpu') else: lowerCAmelCase : Dict =osp.join(args.model_path, 'vae', 'diffusion_pytorch_model.bin') lowerCAmelCase : Any =torch.load(vae_path, map_location='cpu') if osp.exists(text_enc_path): lowerCAmelCase : List[Any] =load_file(text_enc_path, device='cpu') else: lowerCAmelCase : Dict =osp.join(args.model_path, 'text_encoder', 'pytorch_model.bin') lowerCAmelCase : List[Any] =torch.load(text_enc_path, map_location='cpu') # Convert the UNet model lowerCAmelCase : Optional[int] =convert_unet_state_dict(unet_state_dict) lowerCAmelCase : Dict ={'model.diffusion_model.' + k: v for k, v in unet_state_dict.items()} # Convert the VAE model lowerCAmelCase : Dict =convert_vae_state_dict(vae_state_dict) lowerCAmelCase : Any ={'first_stage_model.' + k: v for k, v in vae_state_dict.items()} # Easiest way to identify v2.0 model seems to be that the text encoder (OpenCLIP) is deeper lowerCAmelCase : Tuple ='text_model.encoder.layers.22.layer_norm2.bias' in text_enc_dict if is_vaa_model: # Need to add the tag 'transformer' in advance so we can knock it out from the final layer-norm lowerCAmelCase : str ={'transformer.' 
+ k: v for k, v in text_enc_dict.items()} lowerCAmelCase : str =convert_text_enc_state_dict_vaa(text_enc_dict) lowerCAmelCase : Any ={'cond_stage_model.model.' + k: v for k, v in text_enc_dict.items()} else: lowerCAmelCase : Tuple =convert_text_enc_state_dict(text_enc_dict) lowerCAmelCase : Union[str, Any] ={'cond_stage_model.transformer.' + k: v for k, v in text_enc_dict.items()} # Put together new checkpoint lowerCAmelCase : Optional[int] ={**unet_state_dict, **vae_state_dict, **text_enc_dict} if args.half: lowerCAmelCase : List[str] ={k: v.half() for k, v in state_dict.items()} if args.use_safetensors: save_file(state_dict, args.checkpoint_path) else: lowerCAmelCase : str ={'state_dict': state_dict} torch.save(state_dict, args.checkpoint_path)
693
from typing import TYPE_CHECKING

# rely on isort to merge the imports
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available

# Lazy-import structure: maps each submodule to the public names it exports.
# The original (obfuscated) version assigned these lists to throwaway names,
# leaving `_import_structure` undefined when passed to `_LazyModule` below.
_import_structure = {
    "configuration_autoformer": [
        "AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "AutoformerConfig",
    ],
}

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    # Modeling classes are only importable when torch is installed.
    _import_structure["modeling_autoformer"] = [
        "AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "AutoformerForPrediction",
        "AutoformerModel",
        "AutoformerPreTrainedModel",
    ]

if TYPE_CHECKING:
    # Static type checkers see the real imports.
    from .configuration_autoformer import (
        AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,
        AutoformerConfig,
    )

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_autoformer import (
            AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            AutoformerForPrediction,
            AutoformerModel,
            AutoformerPreTrainedModel,
        )

else:
    import sys

    # Replace this module with a lazy proxy so heavy deps load on first access.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
693
1
import logging

from transformers import PretrainedConfig


logger = logging.getLogger(__name__)

BERTABS_FINETUNED_CONFIG_MAP = {
    "bertabs-finetuned-cnndm": "https://huggingface.co/remi/bertabs-finetuned-cnndm-extractive-abstractive-summarization/resolve/main/config.json",
}


class BertAbsConfig(PretrainedConfig):
    """Configuration for the BertAbs summarization model (encoder/decoder sizes and dropouts).

    Parameter names are recovered from the attribute assignments in the body;
    the obfuscated original declared twelve parameters all named `lowercase_`,
    which is a SyntaxError and left every assignment reading undefined names.
    """

    model_type = "bertabs"

    def __init__(
        self,
        vocab_size=30522,       # size of the BERT vocabulary
        max_pos=512,            # maximum sequence length / positions
        enc_layers=6,
        enc_hidden_size=512,
        enc_heads=8,
        enc_ff_size=512,
        enc_dropout=0.2,
        dec_layers=6,
        dec_hidden_size=768,
        dec_heads=8,
        dec_ff_size=2048,
        dec_dropout=0.2,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.vocab_size = vocab_size
        self.max_pos = max_pos

        self.enc_layers = enc_layers
        self.enc_hidden_size = enc_hidden_size
        self.enc_heads = enc_heads
        self.enc_ff_size = enc_ff_size
        self.enc_dropout = enc_dropout

        self.dec_layers = dec_layers
        self.dec_hidden_size = dec_hidden_size
        self.dec_heads = dec_heads
        self.dec_ff_size = dec_ff_size
        self.dec_dropout = dec_dropout
693
import copy
from collections import OrderedDict
from typing import Dict, Mapping

from packaging import version

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ..auto import CONFIG_MAPPING


logger = logging.get_logger(__name__)

DETR_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "facebook/detr-resnet-50": "https://huggingface.co/facebook/detr-resnet-50/resolve/main/config.json",
    # See all DETR models at https://huggingface.co/models?filter=detr
}


class DetrConfig(PretrainedConfig):
    """Configuration for DETR (DEtection TRansformer).

    Parameter names are reconstructed from the attribute assignments in
    `__init__`; the obfuscated original used duplicate `lowercase_` parameters
    (a SyntaxError) and anonymous class attributes.
    """

    model_type = "detr"
    keys_to_ignore_at_inference = ["past_key_values"]
    attribute_map = {
        "hidden_size": "d_model",
        "num_attention_heads": "encoder_attention_heads",
    }

    def __init__(
        self,
        use_timm_backbone=True,
        backbone_config=None,
        num_channels=3,
        num_queries=100,
        encoder_layers=6,
        encoder_ffn_dim=2048,
        encoder_attention_heads=8,
        decoder_layers=6,
        decoder_ffn_dim=2048,
        decoder_attention_heads=8,
        encoder_layerdrop=0.0,
        decoder_layerdrop=0.0,
        is_encoder_decoder=True,
        activation_function="relu",
        d_model=256,
        dropout=0.1,
        attention_dropout=0.0,
        activation_dropout=0.0,
        init_std=0.02,
        init_xavier_std=1.0,
        auxiliary_loss=False,
        position_embedding_type="sine",
        backbone="resnet50",
        use_pretrained_backbone=True,
        dilation=False,
        class_cost=1,
        bbox_cost=5,
        giou_cost=2,
        mask_loss_coefficient=1,
        dice_loss_coefficient=1,
        bbox_loss_coefficient=5,
        giou_loss_coefficient=2,
        eos_coefficient=0.1,
        **kwargs,
    ):
        # The timm backbone and an explicit HF backbone config are mutually exclusive.
        if backbone_config is not None and use_timm_backbone:
            raise ValueError("You can't specify both `backbone_config` and `use_timm_backbone`.")

        if not use_timm_backbone:
            if backbone_config is None:
                logger.info("`backbone_config` is `None`. Initializing the config with the default `ResNet` backbone.")
                backbone_config = CONFIG_MAPPING["resnet"](out_features=["stage4"])
            elif isinstance(backbone_config, dict):
                backbone_model_type = backbone_config.get("model_type")
                config_class = CONFIG_MAPPING[backbone_model_type]
                backbone_config = config_class.from_dict(backbone_config)
            # set timm attributes to None
            dilation, backbone, use_pretrained_backbone = None, None, None

        self.use_timm_backbone = use_timm_backbone
        self.backbone_config = backbone_config
        self.num_channels = num_channels
        self.num_queries = num_queries
        self.d_model = d_model
        self.encoder_ffn_dim = encoder_ffn_dim
        self.encoder_layers = encoder_layers
        self.encoder_attention_heads = encoder_attention_heads
        self.decoder_ffn_dim = decoder_ffn_dim
        self.decoder_layers = decoder_layers
        self.decoder_attention_heads = decoder_attention_heads
        self.dropout = dropout
        self.attention_dropout = attention_dropout
        self.activation_dropout = activation_dropout
        self.activation_function = activation_function
        self.init_std = init_std
        self.init_xavier_std = init_xavier_std
        self.encoder_layerdrop = encoder_layerdrop
        self.decoder_layerdrop = decoder_layerdrop
        # NOTE(review): the original assigns `encoder_layers` twice; the second
        # assignment target is presumably `num_hidden_layers` — confirm upstream.
        self.num_hidden_layers = encoder_layers
        self.auxiliary_loss = auxiliary_loss
        self.position_embedding_type = position_embedding_type
        self.backbone = backbone
        self.use_pretrained_backbone = use_pretrained_backbone
        self.dilation = dilation
        # Hungarian matcher
        self.class_cost = class_cost
        self.bbox_cost = bbox_cost
        self.giou_cost = giou_cost
        # Loss coefficients
        self.mask_loss_coefficient = mask_loss_coefficient
        self.dice_loss_coefficient = dice_loss_coefficient
        self.bbox_loss_coefficient = bbox_loss_coefficient
        self.giou_loss_coefficient = giou_loss_coefficient
        self.eos_coefficient = eos_coefficient
        super().__init__(is_encoder_decoder=is_encoder_decoder, **kwargs)

    @property
    def num_attention_heads(self) -> int:
        return self.encoder_attention_heads

    @property
    def hidden_size(self) -> int:
        return self.d_model

    @classmethod
    def from_backbone_config(cls, backbone_config, **kwargs) -> "DetrConfig":
        """Build a config from a backbone config, forwarding extra kwargs."""
        return cls(backbone_config=backbone_config, **kwargs)

    def to_dict(self) -> Dict[str, any]:
        """Serialize to a plain dict, expanding the nested backbone config."""
        output = copy.deepcopy(self.__dict__)
        if output["backbone_config"] is not None:
            output["backbone_config"] = self.backbone_config.to_dict()
        output["model_type"] = self.__class__.model_type
        return output


class DetrOnnxConfig(OnnxConfig):
    """ONNX export description for DETR (inputs, tolerance, opset)."""

    torch_onnx_minimum_version = version.parse("1.11")

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        return OrderedDict(
            [
                ("pixel_values", {0: "batch", 1: "num_channels", 2: "height", 3: "width"}),
                ("pixel_mask", {0: "batch"}),
            ]
        )

    @property
    def atol_for_validation(self) -> float:
        return 1e-5

    @property
    def default_onnx_opset(self) -> int:
        return 12
693
1
import html

from ...feature_extraction_utils import BatchFeature, FeatureExtractionMixin
from ...utils import is_bs4_available, logging, requires_backends


if is_bs4_available():
    import bs4
    from bs4 import BeautifulSoup


logger = logging.get_logger(__name__)


class MarkupLMFeatureExtractor(FeatureExtractionMixin):
    """Extract text nodes and their XPaths from raw HTML using BeautifulSoup.

    Identifier note: the obfuscated original mangled `bs4` to `bsa`
    (`is_bsa_available`, `import bsa`) and dropped most local names; both are
    restored here.
    """

    def __init__(self, **kwargs):
        requires_backends(self, ["bs4"])
        super().__init__(**kwargs)

    def xpath_soup(self, element):
        """Walk from `element` up to the document root, collecting tag names
        and 1-based sibling subscripts (0 when the tag is an only child)."""
        xpath_tags = []
        xpath_subscripts = []
        child = element if element.name else element.parent
        for parent in child.parents:  # type: bs4.element.Tag
            # Direct children only — the subscript indexes among siblings.
            siblings = parent.find_all(child.name, recursive=False)
            xpath_tags.append(child.name)
            xpath_subscripts.append(
                0 if 1 == len(siblings) else next(i for i, s in enumerate(siblings, 1) if s is child)
            )
            child = parent
        # Collected bottom-up; reverse to read root-to-leaf.
        xpath_tags.reverse()
        xpath_subscripts.reverse()
        return xpath_tags, xpath_subscripts

    def get_three_from_single(self, html_string):
        """Parse one HTML string into (doc strings, tag sequences, subscript sequences)."""
        html_code = BeautifulSoup(html_string, "html.parser")

        all_doc_strings = []
        string2xtag_seq = []
        string2xsubs_seq = []

        for element in html_code.descendants:
            if type(element) == bs4.element.NavigableString:
                if type(element.parent) != bs4.element.Tag:
                    continue

                text_in_this_tag = html.unescape(element).strip()
                if not text_in_this_tag:
                    continue

                all_doc_strings.append(text_in_this_tag)

                xpath_tags, xpath_subscripts = self.xpath_soup(element)
                string2xtag_seq.append(xpath_tags)
                string2xsubs_seq.append(xpath_subscripts)

        if len(all_doc_strings) != len(string2xtag_seq):
            raise ValueError("Number of doc strings and xtags does not correspond")
        if len(all_doc_strings) != len(string2xsubs_seq):
            raise ValueError("Number of doc strings and xsubs does not correspond")

        return all_doc_strings, string2xtag_seq, string2xsubs_seq

    def construct_xpath(self, xpath_tags, xpath_subscripts):
        """Render `/tag[n]/...` from parallel tag and subscript lists."""
        xpath = ""
        for tagname, subs in zip(xpath_tags, xpath_subscripts):
            xpath += f"/{tagname}"
            if subs != 0:
                xpath += f"[{subs}]"
        return xpath

    def __call__(self, html_strings) -> BatchFeature:
        """Extract nodes + xpaths from a single HTML string or a batch of them.

        Returns a `BatchFeature` with keys "nodes" and "xpaths".
        """
        valid_strings = False
        # Check that strings has a valid type
        if isinstance(html_strings, str):
            valid_strings = True
        elif isinstance(html_strings, (list, tuple)):
            if len(html_strings) == 0 or isinstance(html_strings[0], str):
                valid_strings = True

        if not valid_strings:
            raise ValueError(
                "HTML strings must of type `str`, `List[str]` (batch of examples), "
                f"but is of type {type(html_strings)}."
            )

        is_batched = bool(isinstance(html_strings, (list, tuple)) and (isinstance(html_strings[0], str)))

        if not is_batched:
            html_strings = [html_strings]

        # Get nodes + xpaths
        nodes = []
        xpaths = []
        for html_string in html_strings:
            all_doc_strings, string2xtag_seq, string2xsubs_seq = self.get_three_from_single(html_string)
            nodes.append(all_doc_strings)
            xpath_strings = []
            for node, tag_list, sub_list in zip(all_doc_strings, string2xtag_seq, string2xsubs_seq):
                xpath_string = self.construct_xpath(tag_list, sub_list)
                xpath_strings.append(xpath_string)
            xpaths.append(xpath_strings)

        # return as Dict
        data = {"nodes": nodes, "xpaths": xpaths}
        # NOTE(review): original obscures the tensor_type argument; None (no
        # tensor conversion) matches the plain-list data built above — confirm.
        encoded_inputs = BatchFeature(data=data, tensor_type=None)

        return encoded_inputs
693
import json
import logging
import os
import sys
from pathlib import Path

import finetune_rag

from transformers.file_utils import is_apex_available
from transformers.testing_utils import (
    TestCasePlus,
    execute_subprocess_async,
    require_ray,
    require_torch_gpu,
    require_torch_multi_gpu,
)


logging.basicConfig(level=logging.DEBUG)

logger = logging.getLogger()
stream_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stream_handler)


class RagFinetuneExampleTests(TestCasePlus):
    """End-to-end integration tests that launch `finetune_rag.py` in a subprocess
    against a tiny dummy dataset and check the resulting exact-match metric."""

    def _create_dummy_data(self, data_dir):
        """Write minimal train/val/test source+target files under `data_dir`."""
        os.makedirs(data_dir, exist_ok=True)
        contents = {"source": "What is love ?", "target": "life"}
        n_lines = {"train": 12, "val": 2, "test": 2}

        for split in ["train", "test", "val"]:
            for field in ["source", "target"]:
                content = "\n".join([contents[field]] * n_lines[split])
                with open(os.path.join(data_dir, f"{split}.{field}"), "w") as f:
                    f.write(content)

    def _run_finetune(self, gpus, distributed_retriever="pytorch"):
        """Run the finetuning script and return the parsed metrics.json dict."""
        tmp_dir = self.get_auto_remove_tmp_dir()
        output_dir = os.path.join(tmp_dir, "output")
        data_dir = os.path.join(tmp_dir, "data")
        self._create_dummy_data(data_dir=data_dir)

        testargs = f"""
            --data_dir {data_dir} \
            --output_dir {output_dir} \
            --model_name_or_path facebook/rag-sequence-base \
            --model_type rag_sequence \
            --do_train \
            --do_predict \
            --n_val -1 \
            --val_check_interval 1.0 \
            --train_batch_size 2 \
            --eval_batch_size 1 \
            --max_source_length 25 \
            --max_target_length 25 \
            --val_max_target_length 25 \
            --test_max_target_length 25 \
            --label_smoothing 0.1 \
            --dropout 0.1 \
            --attention_dropout 0.1 \
            --weight_decay 0.001 \
            --adam_epsilon 1e-08 \
            --max_grad_norm 0.1 \
            --lr_scheduler polynomial \
            --learning_rate 3e-04 \
            --num_train_epochs 1 \
            --warmup_steps 4 \
            --gradient_accumulation_steps 1 \
            --distributed-port 8787 \
            --use_dummy_dataset 1 \
            --distributed_retriever {distributed_retriever} \
        """.split()

        if gpus > 0:
            testargs.append(f"--gpus={gpus}")
            if is_apex_available():
                testargs.append("--fp16")
        else:
            testargs.append("--gpus=0")
            testargs.append("--distributed_backend=ddp_cpu")
            testargs.append("--num_processes=2")

        cmd = [sys.executable, str(Path(finetune_rag.__file__).resolve())] + testargs
        execute_subprocess_async(cmd, env=self.get_env())

        metrics_save_path = os.path.join(output_dir, "metrics.json")
        with open(metrics_save_path) as f:
            result = json.load(f)
        return result

    @require_torch_gpu
    def test_finetune_gpu(self):
        result = self._run_finetune(gpus=1)
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_multi_gpu
    def test_finetune_multigpu(self):
        result = self._run_finetune(gpus=2)
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_gpu
    @require_ray
    def test_finetune_gpu_ray_retrieval(self):
        result = self._run_finetune(gpus=1, distributed_retriever="ray")
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_multi_gpu
    @require_ray
    def test_finetune_multigpu_ray_retrieval(self):
        # NOTE(review): original also passes gpus=1 here despite the multi-gpu
        # decorator — preserved as-is; confirm against upstream intent.
        result = self._run_finetune(gpus=1, distributed_retriever="ray")
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)
693
1
import sys


def matrix_chain_order(array):
    """Dynamic-programming solution to the matrix-chain multiplication problem.

    `array` holds the chain dimensions: matrix i is array[i-1] x array[i].
    Returns (matrix, sol) where matrix[a][b] is the minimal number of scalar
    multiplications to compute the product of matrices a..b, and sol[a][b] is
    the optimal split point used to reconstruct the parenthesization.

    The obfuscated original named all three functions `_UpperCAmelCase` while
    the call sites used the real names, so the script raised NameError; the
    real names are restored from the call sites.
    """
    n = len(array)
    matrix = [[0 for _ in range(n)] for _ in range(n)]
    sol = [[0 for _ in range(n)] for _ in range(n)]

    for chain_length in range(2, n):
        for a in range(1, n - chain_length + 1):
            b = a + chain_length - 1
            matrix[a][b] = sys.maxsize  # sentinel: no split tried yet
            for c in range(a, b):
                cost = matrix[a][c] + matrix[c + 1][b] + array[a - 1] * array[c] * array[b]
                if cost < matrix[a][b]:
                    matrix[a][b] = cost
                    sol[a][b] = c
    return matrix, sol


def print_optiomal_solution(optimal_solution, i, j):
    """Recursively print the optimal parenthesization for matrices i..j.

    (Function name keeps the original's spelling so existing callers work.)
    """
    if i == j:
        print("A" + str(i), end=" ")
    else:
        print("(", end=" ")
        print_optiomal_solution(optimal_solution, i, optimal_solution[i][j])
        print_optiomal_solution(optimal_solution, optimal_solution[i][j] + 1, j)
        print(")", end=" ")


def main():
    """Demo on the classic CLRS instance."""
    array = [30, 35, 15, 5, 10, 20, 25]
    n = len(array)
    # Size of matrix created from above array will be
    # 30*35 35*15 15*5 5*10 10*20 20*25
    matrix, optimal_solution = matrix_chain_order(array)

    print("No. of Operation required: " + str(matrix[1][n - 1]))
    print_optiomal_solution(optimal_solution, 1, n - 1)


if __name__ == "__main__":
    main()
693
from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "transfo-xl-wt103": "https://huggingface.co/transfo-xl-wt103/resolve/main/config.json",
}


class TransfoXLConfig(PretrainedConfig):
    """Configuration for Transformer-XL.

    Parameter names are recovered from the attribute assignments; the
    obfuscated original declared every parameter as `lowercase_` (duplicate
    parameter names — a SyntaxError).
    """

    model_type = "transfo-xl"
    keys_to_ignore_at_inference = ["mems"]
    attribute_map = {
        "n_token": "vocab_size",
        "hidden_size": "d_model",
        "num_attention_heads": "n_head",
        "num_hidden_layers": "n_layer",
    }

    def __init__(
        self,
        vocab_size=267735,
        cutoffs=[20000, 40000, 200000],   # adaptive-softmax cluster boundaries
        d_model=1024,
        d_embed=1024,
        n_head=16,
        d_head=64,
        d_inner=4096,
        div_val=4,
        pre_lnorm=False,
        n_layer=18,
        mem_len=1600,
        clamp_len=1000,
        same_length=True,
        proj_share_all_but_first=True,
        attn_type=0,
        sample_softmax=-1,
        adaptive=True,
        dropout=0.1,
        dropatt=0.0,
        untie_r=True,
        init="normal",
        init_range=0.01,
        proj_init_std=0.01,
        init_std=0.02,
        layer_norm_epsilon=1e-5,
        eos_token_id=0,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.cutoffs = []
        self.cutoffs.extend(cutoffs)
        # Share all embedding projections except the first cluster's.
        if proj_share_all_but_first:
            self.tie_projs = [False] + [True] * len(self.cutoffs)
        else:
            self.tie_projs = [False] + [False] * len(self.cutoffs)
        self.d_model = d_model
        self.d_embed = d_embed
        self.d_head = d_head
        self.d_inner = d_inner
        self.div_val = div_val
        self.pre_lnorm = pre_lnorm
        self.n_layer = n_layer
        self.n_head = n_head
        self.mem_len = mem_len
        self.same_length = same_length
        self.attn_type = attn_type
        self.clamp_len = clamp_len
        self.sample_softmax = sample_softmax
        self.adaptive = adaptive
        self.dropout = dropout
        self.dropatt = dropatt
        self.untie_r = untie_r
        self.init = init
        self.init_range = init_range
        self.proj_init_std = proj_init_std
        self.init_std = init_std
        self.layer_norm_epsilon = layer_norm_epsilon
        super().__init__(eos_token_id=eos_token_id, **kwargs)

    @property
    def max_position_embeddings(self):
        # Message copied from Transformer-XL documentation
        logger.info(f"The model {self.model_type} is one of the few models that has no sequence length limit.")
        return -1

    @max_position_embeddings.setter
    def max_position_embeddings(self, value):
        # Message copied from Transformer-XL documentation
        raise NotImplementedError(
            f"The model {self.model_type} is one of the few models that has no sequence length limit."
        )
693
1
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_tensorflow_text_available, is_torch_available

# Lazy-import structure: maps each submodule to the public names it exports.
# The obfuscated original assigned these lists to throwaway names, leaving
# `_import_structure` undefined when passed to `_LazyModule` below.
_import_structure = {
    "configuration_ernie": ["ERNIE_PRETRAINED_CONFIG_ARCHIVE_MAP", "ErnieConfig", "ErnieOnnxConfig"],
}

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    # Modeling classes are only importable when torch is installed.
    _import_structure["modeling_ernie"] = [
        "ERNIE_PRETRAINED_MODEL_ARCHIVE_LIST",
        "ErnieForCausalLM",
        "ErnieForMaskedLM",
        "ErnieForMultipleChoice",
        "ErnieForNextSentencePrediction",
        "ErnieForPreTraining",
        "ErnieForQuestionAnswering",
        "ErnieForSequenceClassification",
        "ErnieForTokenClassification",
        "ErnieModel",
        "ErniePreTrainedModel",
    ]

if TYPE_CHECKING:
    from .configuration_ernie import ERNIE_PRETRAINED_CONFIG_ARCHIVE_MAP, ErnieConfig, ErnieOnnxConfig

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_ernie import (
            ERNIE_PRETRAINED_MODEL_ARCHIVE_LIST,
            ErnieForCausalLM,
            ErnieForMaskedLM,
            ErnieForMultipleChoice,
            ErnieForNextSentencePrediction,
            ErnieForPreTraining,
            ErnieForQuestionAnswering,
            ErnieForSequenceClassification,
            ErnieForTokenClassification,
            ErnieModel,
            ErniePreTrainedModel,
        )

else:
    import sys

    # Replace this module with a lazy proxy so heavy deps load on first access.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
693
import torch

from diffusers import DiffusionPipeline


class CustomPipeline(DiffusionPipeline):
    """Minimal test pipeline: one denoising step whose output is forced to ones.

    Local names are restored from the `register_modules` keywords; the
    obfuscated original used duplicate `lowercase_` parameters (a SyntaxError)
    and undefined locals.
    """

    def __init__(self, unet, scheduler):
        super().__init__()
        self.register_modules(unet=unet, scheduler=scheduler)

    def __call__(self):
        # Random sample shaped from the registered UNet's config.
        image = torch.randn(
            (1, self.unet.config.in_channels, self.unet.config.sample_size, self.unet.config.sample_size),
        )
        timestep = 1

        model_output = self.unet(image, timestep).sample
        scheduler_output = self.scheduler.step(model_output, timestep, image).prev_sample

        # scheduler_output - scheduler_output cancels to zeros, so the result is
        # deterministically all-ones with the scheduler output's shape/dtype.
        result = scheduler_output - scheduler_output + torch.ones_like(scheduler_output)

        return result
693
1
from __future__ import annotations from typing import Any def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' create_state_space_tree(SCREAMING_SNAKE_CASE__ ,[] ,0 ) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' if index == len(SCREAMING_SNAKE_CASE__ ): print(SCREAMING_SNAKE_CASE__ ) return create_state_space_tree(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,index + 1 ) current_subsequence.append(sequence[index] ) create_state_space_tree(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,index + 1 ) current_subsequence.pop() if __name__ == "__main__": lowerCAmelCase : list[Any] =[3, 1, 2, 4] generate_all_subsequences(seq) seq.clear() seq.extend(['A', 'B', 'C']) generate_all_subsequences(seq)
693
import pytest
import requests

from datasets.utils.file_utils import http_head

from .utils import OfflineSimulationMode, RequestWouldHangIndefinitelyError, offline


@pytest.mark.integration
def test_offline_with_timeout():
    """CONNECTION_TIMES_OUT mode: requests hang (simulated) or time out.

    The obfuscated original passed the undefined name `SCREAMING_SNAKE_CASE__`
    to `pytest.raises`; the imported `RequestWouldHangIndefinitelyError` is the
    only exception type from this module that fits the no-timeout case.
    """
    with offline(OfflineSimulationMode.CONNECTION_TIMES_OUT):
        # No timeout given: the simulation raises instead of hanging forever.
        with pytest.raises(RequestWouldHangIndefinitelyError):
            requests.request("GET", "https://huggingface.co")
        # Explicit timeout: surfaces as a regular requests ConnectTimeout.
        with pytest.raises(requests.exceptions.ConnectTimeout):
            requests.request("GET", "https://huggingface.co", timeout=1.0)


@pytest.mark.integration
def test_offline_with_connection_error():
    """CONNECTION_FAILS mode: any request raises a connection error."""
    with offline(OfflineSimulationMode.CONNECTION_FAILS):
        with pytest.raises(requests.exceptions.ConnectionError):
            requests.request("GET", "https://huggingface.co")


def test_offline_with_datasets_offline_mode_enabled():
    """HF_DATASETS_OFFLINE=1: library helpers refuse network access."""
    with offline(OfflineSimulationMode.HF_DATASETS_OFFLINE_SET_TO_1):
        # NOTE(review): built-in ConnectionError matches datasets' offline
        # guard here — confirm the exact type against the datasets test suite.
        with pytest.raises(ConnectionError):
            http_head("https://huggingface.co")
693
1
from typing import TYPE_CHECKING

# rely on isort to merge the imports
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available

# Lazy-import structure: maps each submodule to the public names it exports.
# The obfuscated original assigned these lists to throwaway names, leaving
# `_import_structure` undefined when passed to `_LazyModule` below.
_import_structure = {
    "configuration_autoformer": [
        "AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "AutoformerConfig",
    ],
}

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    # Modeling classes are only importable when torch is installed.
    _import_structure["modeling_autoformer"] = [
        "AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "AutoformerForPrediction",
        "AutoformerModel",
        "AutoformerPreTrainedModel",
    ]

if TYPE_CHECKING:
    from .configuration_autoformer import (
        AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,
        AutoformerConfig,
    )

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_autoformer import (
            AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            AutoformerForPrediction,
            AutoformerModel,
            AutoformerPreTrainedModel,
        )

else:
    import sys

    # Replace this module with a lazy proxy so heavy deps load on first access.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
693
import json
import pathlib
import unittest

import numpy as np

from transformers.testing_utils import require_torch, require_vision, slow
from transformers.utils import is_torch_available, is_vision_available

from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs


if is_torch_available():
    import torch

if is_vision_available():
    from PIL import Image

    from transformers import DetrImageProcessor


class DetrImageProcessingTester(unittest.TestCase):
    """Builds DetrImageProcessor kwargs and predicts the output (height, width)
    the processor should produce for given inputs."""

    def __init__(
        self,
        parent,
        batch_size=7,
        num_channels=3,
        min_resolution=30,
        max_resolution=400,
        do_resize=True,
        size=None,
        do_rescale=True,
        rescale_factor=1 / 255,
        do_normalize=True,
        image_mean=[0.5, 0.5, 0.5],
        image_std=[0.5, 0.5, 0.5],
        do_pad=True,
    ):
        # by setting size["longest_edge"] > max_resolution we're effectively not testing this :p
        size = size if size is not None else {"shortest_edge": 18, "longest_edge": 1333}
        self.parent = parent
        self.batch_size = batch_size
        self.num_channels = num_channels
        self.min_resolution = min_resolution
        self.max_resolution = max_resolution
        self.do_resize = do_resize
        self.size = size
        self.do_rescale = do_rescale
        self.rescale_factor = rescale_factor
        self.do_normalize = do_normalize
        self.image_mean = image_mean
        self.image_std = image_std
        self.do_pad = do_pad

    def prepare_image_processor_dict(self):
        """Kwargs dict used to instantiate the image processor under test."""
        return {
            "do_resize": self.do_resize,
            "size": self.size,
            "do_rescale": self.do_rescale,
            "rescale_factor": self.rescale_factor,
            "do_normalize": self.do_normalize,
            "image_mean": self.image_mean,
            "image_std": self.image_std,
            "do_pad": self.do_pad,
        }

    def get_expected_values(self, image_inputs, batched=False):
        """
        Predict the (height, width) DetrImageProcessor should output.

        Single image: the shorter edge is scaled to size["shortest_edge"], keeping
        aspect ratio. Batched: returns the per-axis maxima across the batch
        (the padding target).
        """
        if not batched:
            image = image_inputs[0]
            if isinstance(image, Image.Image):
                w, h = image.size
            else:
                # assumes channels-first arrays/tensors: shape (C, H, W) — matches prepare_image_inputs
                h, w = image.shape[1], image.shape[2]
            if w < h:
                expected_height = int(self.size["shortest_edge"] * h / w)
                expected_width = self.size["shortest_edge"]
            elif w > h:
                expected_height = self.size["shortest_edge"]
                expected_width = int(self.size["shortest_edge"] * w / h)
            else:
                expected_height = self.size["shortest_edge"]
                expected_width = self.size["shortest_edge"]
        else:
            expected_values = []
            for image in image_inputs:
                expected_height, expected_width = self.get_expected_values([image])
                expected_values.append((expected_height, expected_width))
            expected_height = max(expected_values, key=lambda item: item[0])[0]
            expected_width = max(expected_values, key=lambda item: item[1])[1]

        return expected_height, expected_width


@require_torch
@require_vision
class DetrImageProcessingTest(ImageProcessingSavingTestMixin, unittest.TestCase):
    image_processing_class = DetrImageProcessor if is_vision_available() else None

    def setUp(self):
        self.image_processor_tester = DetrImageProcessingTester(self)

    @property
    def image_processor_dict(self):
        return self.image_processor_tester.prepare_image_processor_dict()

    def test_image_processor_properties(self):
        """The processor exposes every configuration attribute it was built with."""
        image_processing = self.image_processing_class(**self.image_processor_dict)
        self.assertTrue(hasattr(image_processing, "image_mean"))
        self.assertTrue(hasattr(image_processing, "image_std"))
        self.assertTrue(hasattr(image_processing, "do_normalize"))
        self.assertTrue(hasattr(image_processing, "do_rescale"))
        self.assertTrue(hasattr(image_processing, "rescale_factor"))
        self.assertTrue(hasattr(image_processing, "do_resize"))
        self.assertTrue(hasattr(image_processing, "size"))
        self.assertTrue(hasattr(image_processing, "do_pad"))

    def test_image_processor_from_dict_with_kwargs(self):
        """from_dict honors both the dict values and legacy kwargs overrides."""
        image_processor = self.image_processing_class.from_dict(self.image_processor_dict)
        self.assertEqual(image_processor.size, {"shortest_edge": 18, "longest_edge": 1333})
        self.assertEqual(image_processor.do_pad, True)

        # legacy `size`/`max_size`/`pad_and_return_pixel_mask` kwargs are remapped
        image_processor = self.image_processing_class.from_dict(
            self.image_processor_dict, size=42, max_size=84, pad_and_return_pixel_mask=False
        )
        self.assertEqual(image_processor.size, {"shortest_edge": 42, "longest_edge": 84})
        self.assertEqual(image_processor.do_pad, False)

    def test_batch_feature(self):
        pass

    def test_call_pil(self):
        # Initialize image_processing
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random PIL images
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False)
        for image in image_inputs:
            self.assertIsInstance(image, Image.Image)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs)
        self.assertEqual(
            encoded_images.shape,
            (1, self.image_processor_tester.num_channels, expected_height, expected_width),
        )

        # Test batched
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs, batched=True)
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                expected_height,
                expected_width,
            ),
        )

    def test_call_numpy(self):
        # Initialize image_processing
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random numpy tensors
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False, numpify=True)
        for image in image_inputs:
            self.assertIsInstance(image, np.ndarray)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs)
        self.assertEqual(
            encoded_images.shape,
            (1, self.image_processor_tester.num_channels, expected_height, expected_width),
        )

        # Test batched
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs, batched=True)
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                expected_height,
                expected_width,
            ),
        )

    def test_call_pytorch(self):
        # Initialize image_processing
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random PyTorch tensors
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False, torchify=True)
        for image in image_inputs:
            self.assertIsInstance(image, torch.Tensor)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs)
        self.assertEqual(
            encoded_images.shape,
            (1, self.image_processor_tester.num_channels, expected_height, expected_width),
        )

        # Test batched
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs, batched=True)
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                expected_height,
                expected_width,
            ),
        )

    @slow
    def test_call_pytorch_with_coco_detection_annotations(self):
        # prepare image and target
        image = Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png")
        with open("./tests/fixtures/tests_samples/COCO/coco_annotations.txt", "r") as f:
            target = json.loads(f.read())

        target = {"image_id": 39769, "annotations": target}

        # encode them
        image_processing = DetrImageProcessor.from_pretrained("facebook/detr-resnet-50")
        encoding = image_processing(images=image, annotations=target, return_tensors="pt")

        # verify pixel values
        expected_shape = torch.Size([1, 3, 800, 1066])
        self.assertEqual(encoding["pixel_values"].shape, expected_shape)

        expected_slice = torch.tensor([0.2796, 0.3138, 0.3481])
        self.assertTrue(torch.allclose(encoding["pixel_values"][0, 0, 0, :3], expected_slice, atol=1e-4))

        # verify area
        expected_area = torch.tensor([5887.9600, 11250.2061, 489353.8438, 837122.7500, 147967.5156, 165732.3438])
        self.assertTrue(torch.allclose(encoding["labels"][0]["area"], expected_area))
        # verify boxes
        expected_boxes_shape = torch.Size([6, 4])
        self.assertEqual(encoding["labels"][0]["boxes"].shape, expected_boxes_shape)
        expected_boxes_slice = torch.tensor([0.5503, 0.2765, 0.0604, 0.2215])
        self.assertTrue(torch.allclose(encoding["labels"][0]["boxes"][0], expected_boxes_slice, atol=1e-3))
        # verify image_id
        expected_image_id = torch.tensor([39769])
        self.assertTrue(torch.allclose(encoding["labels"][0]["image_id"], expected_image_id))
        # verify is_crowd
        expected_is_crowd = torch.tensor([0, 0, 0, 0, 0, 0])
        self.assertTrue(torch.allclose(encoding["labels"][0]["iscrowd"], expected_is_crowd))
        # verify class_labels
        expected_class_labels = torch.tensor([75, 75, 63, 65, 17, 17])
        self.assertTrue(torch.allclose(encoding["labels"][0]["class_labels"], expected_class_labels))
        # verify orig_size
        expected_orig_size = torch.tensor([480, 640])
        self.assertTrue(torch.allclose(encoding["labels"][0]["orig_size"], expected_orig_size))
        # verify size
        expected_size = torch.tensor([800, 1066])
        self.assertTrue(torch.allclose(encoding["labels"][0]["size"], expected_size))

    @slow
    def test_call_pytorch_with_coco_panoptic_annotations(self):
        # prepare image, target and masks_path
        image = Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png")
        with open("./tests/fixtures/tests_samples/COCO/coco_panoptic_annotations.txt", "r") as f:
            target = json.loads(f.read())

        target = {"file_name": "000000039769.png", "image_id": 39769, "segments_info": target}

        masks_path = pathlib.Path("./tests/fixtures/tests_samples/COCO/coco_panoptic")

        # encode them
        image_processing = DetrImageProcessor.from_pretrained("facebook/detr-resnet-50-panoptic")
        encoding = image_processing(images=image, annotations=target, masks_path=masks_path, return_tensors="pt")

        # verify pixel values
        expected_shape = torch.Size([1, 3, 800, 1066])
        self.assertEqual(encoding["pixel_values"].shape, expected_shape)

        expected_slice = torch.tensor([0.2796, 0.3138, 0.3481])
        self.assertTrue(torch.allclose(encoding["pixel_values"][0, 0, 0, :3], expected_slice, atol=1e-4))

        # verify area
        expected_area = torch.tensor([147979.6875, 165527.0469, 484638.5938, 11292.9375, 5879.6562, 7634.1147])
        self.assertTrue(torch.allclose(encoding["labels"][0]["area"], expected_area))
        # verify boxes
        expected_boxes_shape = torch.Size([6, 4])
        self.assertEqual(encoding["labels"][0]["boxes"].shape, expected_boxes_shape)
        expected_boxes_slice = torch.tensor([0.2625, 0.5437, 0.4688, 0.8625])
        self.assertTrue(torch.allclose(encoding["labels"][0]["boxes"][0], expected_boxes_slice, atol=1e-3))
        # verify image_id
        expected_image_id = torch.tensor([39769])
        self.assertTrue(torch.allclose(encoding["labels"][0]["image_id"], expected_image_id))
        # verify is_crowd
        expected_is_crowd = torch.tensor([0, 0, 0, 0, 0, 0])
        self.assertTrue(torch.allclose(encoding["labels"][0]["iscrowd"], expected_is_crowd))
        # verify class_labels
        expected_class_labels = torch.tensor([17, 17, 63, 75, 75, 93])
        self.assertTrue(torch.allclose(encoding["labels"][0]["class_labels"], expected_class_labels))
        # verify masks
        expected_masks_sum = 822873
        self.assertEqual(encoding["labels"][0]["masks"].sum().item(), expected_masks_sum)
        # verify orig_size
        expected_orig_size = torch.tensor([480, 640])
        self.assertTrue(torch.allclose(encoding["labels"][0]["orig_size"], expected_orig_size))
        # verify size
        expected_size = torch.tensor([800, 1066])
        self.assertTrue(torch.allclose(encoding["labels"][0]["size"], expected_size))
693
1
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_flax_available,
    is_tf_available,
    is_torch_available,
)


# Lazy-import structure: maps submodule name -> list of public names it exports.
_import_structure = {"configuration_encoder_decoder": ["EncoderDecoderConfig"]}

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_encoder_decoder"] = ["EncoderDecoderModel"]

try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_tf_encoder_decoder"] = ["TFEncoderDecoderModel"]

try:
    if not is_flax_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_flax_encoder_decoder"] = ["FlaxEncoderDecoderModel"]

if TYPE_CHECKING:
    # Static type checkers see the real imports; each backend guarded the same
    # way as the runtime structure above.
    from .configuration_encoder_decoder import EncoderDecoderConfig

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_encoder_decoder import EncoderDecoderModel

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_encoder_decoder import TFEncoderDecoderModel

    try:
        if not is_flax_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_flax_encoder_decoder import FlaxEncoderDecoderModel

else:
    import sys

    # At runtime, replace this module with a lazy proxy so heavy deps load on first access.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
693
def binary_multiply(a: int, b: int) -> int:
    """
    Multiply a by b using binary (Russian-peasant) multiplication:
    repeated doubling of a, adding it whenever the current low bit of b is set.

    :param a: first factor
    :param b: second factor (must be non-negative; the loop runs while b > 0)
    :return: a * b
    """
    res = 0
    while b > 0:
        if b & 1:
            res += a
        a += a
        b >>= 1
    return res


def binary_mod_multiply(a: int, b: int, modulus: int) -> int:
    """
    Compute (a * b) % modulus with the same doubling scheme, reducing the
    accumulator modulo `modulus` at each addition to keep intermediates small.

    :param a: first factor
    :param b: second factor (non-negative)
    :param modulus: modulus (> 0)
    :return: (a * b) % modulus
    """
    res = 0
    while b > 0:
        if b & 1:
            res = ((res % modulus) + (a % modulus)) % modulus
        a += a
        b >>= 1
    return res
693
1
import unittest

import numpy as np
import torch

from diffusers import ScoreSdeVePipeline, ScoreSdeVeScheduler, UNet2DModel
from diffusers.utils.testing_utils import enable_full_determinism, require_torch, slow, torch_device


enable_full_determinism()


class ScoreSdeVePipelineFastTests(unittest.TestCase):
    @property
    def dummy_uncond_unet(self):
        """Tiny seeded UNet2DModel so outputs are reproducible and cheap."""
        torch.manual_seed(0)
        model = UNet2DModel(
            block_out_channels=(32, 64),
            layers_per_block=2,
            sample_size=32,
            in_channels=3,
            out_channels=3,
            down_block_types=("DownBlock2D", "AttnDownBlock2D"),
            up_block_types=("AttnUpBlock2D", "UpBlock2D"),
        )
        return model

    def test_inference(self):
        """Dict output and tuple output (return_dict=False) must agree on the same seed."""
        unet = self.dummy_uncond_unet
        scheduler = ScoreSdeVeScheduler()

        sde_ve = ScoreSdeVePipeline(unet=unet, scheduler=scheduler)
        sde_ve.to(torch_device)
        sde_ve.set_progress_bar_config(disable=None)

        generator = torch.manual_seed(0)
        image = sde_ve(num_inference_steps=2, output_type="numpy", generator=generator).images

        generator = torch.manual_seed(0)
        image_from_tuple = sde_ve(num_inference_steps=2, output_type="numpy", generator=generator, return_dict=False)[
            0
        ]

        image_slice = image[0, -3:, -3:, -1]
        image_from_tuple_slice = image_from_tuple[0, -3:, -3:, -1]

        assert image.shape == (1, 32, 32, 3)
        expected_slice = np.array([0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0])
        assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
        assert np.abs(image_from_tuple_slice.flatten() - expected_slice).max() < 1e-2


@slow
@require_torch
class ScoreSdeVePipelineIntegrationTests(unittest.TestCase):
    def test_inference(self):
        """Full pretrained pipeline run; checks shape and a corner slice of the output."""
        model_id = "google/ncsnpp-church-256"
        model = UNet2DModel.from_pretrained(model_id)
        scheduler = ScoreSdeVeScheduler.from_pretrained(model_id)

        sde_ve = ScoreSdeVePipeline(unet=model, scheduler=scheduler)
        sde_ve.to(torch_device)
        sde_ve.set_progress_bar_config(disable=None)

        generator = torch.manual_seed(0)
        image = sde_ve(num_inference_steps=10, output_type="numpy", generator=generator).images

        image_slice = image[0, -3:, -3:, -1]

        assert image.shape == (1, 256, 256, 3)
        expected_slice = np.array([0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0])
        assert np.abs(image_slice.flatten() - expected_slice).max() < 1e-2
693
from math import factorial


class Dual:
    """Dual number for forward-mode automatic differentiation.

    `real` is the function value; `duals[k]` carries the coefficient of the
    k+1-th infinitesimal power, so the n-th derivative is duals[n-1] * n!.
    """

    def __init__(self, real, rank):
        self.real = real
        if isinstance(rank, int):
            # rank given as an int: seed every dual coefficient with 1
            self.duals = [1] * rank
        else:
            # rank given as an explicit list of coefficients
            self.duals = rank

    def __repr__(self):
        return (
            f"{self.real}+"
            f"{'+'.join(str(dual) + 'E' + str(n + 1) for n, dual in enumerate(self.duals))}"
        )

    def reduce(self):
        """Drop trailing zero coefficients."""
        cur = self.duals.copy()
        while cur[-1] == 0:
            cur.pop(-1)
        return Dual(self.real, cur)

    def __add__(self, other):
        if not isinstance(other, Dual):
            return Dual(self.real + other, self.duals)
        s_dual = self.duals.copy()
        o_dual = other.duals.copy()
        # pad the shorter coefficient list (with 1s, matching the constructor's seeding)
        if len(s_dual) > len(o_dual):
            o_dual.extend([1] * (len(s_dual) - len(o_dual)))
        elif len(s_dual) < len(o_dual):
            s_dual.extend([1] * (len(o_dual) - len(s_dual)))
        new_duals = []
        for i in range(len(s_dual)):
            new_duals.append(s_dual[i] + o_dual[i])
        return Dual(self.real + other.real, new_duals)

    __radd__ = __add__

    def __sub__(self, other):
        return self + other * -1

    def __mul__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i * other)
            return Dual(self.real * other, new_duals)
        # polynomial product of the two coefficient lists plus the cross terms with the reals
        new_duals = [0] * (len(self.duals) + len(other.duals) + 1)
        for i, item in enumerate(self.duals):
            for j, jtem in enumerate(other.duals):
                new_duals[i + j + 1] += item * jtem
        for k in range(len(self.duals)):
            new_duals[k] += self.duals[k] * other.real
        for index in range(len(other.duals)):
            new_duals[index] += other.duals[index] * self.real
        return Dual(self.real * other.real, new_duals)

    __rmul__ = __mul__

    def __truediv__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i / other)
            return Dual(self.real / other, new_duals)
        raise ValueError

    def __floordiv__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i // other)
            return Dual(self.real // other, new_duals)
        raise ValueError

    def __pow__(self, n):
        if n < 0 or isinstance(n, float):
            raise ValueError("power must be a positive integer")
        if n == 0:
            return 1
        if n == 1:
            return self
        x = self
        for _ in range(n - 1):
            x *= self
        return x


def differentiate(func, position, order):
    """
    Evaluate the `order`-th derivative of `func` at `position` via dual numbers.

    :param func: callable accepting a Dual (or number) argument
    :param position: point at which to differentiate (int or float)
    :param order: derivative order (int); 0 returns func(position) itself
    :raises ValueError: if func is not callable, position is not numeric,
        or order is not an int
    """
    if not callable(func):
        raise ValueError("differentiate() requires a function as input for func")
    if not isinstance(position, (float, int)):
        raise ValueError("differentiate() requires a float as input for position")
    if not isinstance(order, int):
        raise ValueError("differentiate() requires an int as input for order")
    d = Dual(position, 1)
    result = func(d)
    if order == 0:
        return result.real
    # duals[order-1] holds the Taylor coefficient; multiply by order! for the derivative
    return result.duals[order - 1] * factorial(order)


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    def f(y):
        return y**2 * y**4

    print(differentiate(f, 9, 2))
693
1
from maths.prime_check import is_prime


def twin_prime(number: int) -> int:
    """
    Return number + 2 if (number, number + 2) is a twin-prime pair, else -1.

    :param number: candidate lower twin prime
    :return: number + 2 when both number and number + 2 are prime, otherwise -1
    :raises TypeError: if number is not an int
    """
    if not isinstance(number, int):
        msg = f"Input value of [number={number}] must be an integer"
        raise TypeError(msg)
    if is_prime(number) and is_prime(number + 2):
        return number + 2
    else:
        return -1


if __name__ == "__main__":
    import doctest

    doctest.testmod()
693
from ..utils import DummyObject, requires_backends


# NOTE(review): class name restored from the un-obfuscated keras_nlp dummy-objects
# pattern — verify against the real module. DummyObject reads `_backends` to report
# the missing backend on any attribute access.
class TFGPT2Tokenizer(metaclass=DummyObject):
    _backends = ["keras_nlp"]

    def __init__(self, *args, **kwargs):
        # Raises immediately if keras_nlp is not installed.
        requires_backends(self, ["keras_nlp"])
693
1
def nor_gate(input_1: int, input_2: int) -> int:
    """
    NOR gate: output is 1 only when both inputs are 0.

    >>> nor_gate(0, 0)
    1
    >>> nor_gate(0, 1)
    0
    >>> nor_gate(1, 0)
    0
    >>> nor_gate(1, 1)
    0
    """
    return int(input_1 == input_2 == 0)


def main() -> None:
    """Print the NOR truth table."""
    print("Truth Table of NOR Gate:")
    print("| Input 1 | Input 2 | Output |")
    print(f"|    0    |    0    |   {nor_gate(0, 0)}    |")
    print(f"|    0    |    1    |   {nor_gate(0, 1)}    |")
    print(f"|    1    |    0    |   {nor_gate(1, 0)}    |")
    print(f"|    1    |    1    |   {nor_gate(1, 1)}    |")


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    main()
693
from ...utils import ( OptionalDependencyNotAvailable, is_torch_available, is_transformers_available, is_transformers_version, ) try: if not (is_transformers_available() and is_torch_available() and is_transformers_version('>=', '4.25.0')): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: from ...utils.dummy_torch_and_transformers_objects import UnCLIPImageVariationPipeline, UnCLIPPipeline else: from .pipeline_unclip import UnCLIPPipeline from .pipeline_unclip_image_variation import UnCLIPImageVariationPipeline from .text_proj import UnCLIPTextProjModel
693
1
import tempfile

import torch

from diffusers import (
    DEISMultistepScheduler,
    DPMSolverMultistepScheduler,
    DPMSolverSinglestepScheduler,
    UniPCMultistepScheduler,
)

from .test_schedulers import SchedulerCommonTest


class DEISMultistepSchedulerTest(SchedulerCommonTest):
    scheduler_classes = (DEISMultistepScheduler,)
    forward_default_kwargs = (("num_inference_steps", 25),)

    def get_scheduler_config(self, **kwargs):
        """Default DEIS config; keyword overrides win."""
        config = {
            "num_train_timesteps": 1000,
            "beta_start": 0.0001,
            "beta_end": 0.02,
            "beta_schedule": "linear",
            "solver_order": 2,
        }
        config.update(**kwargs)
        return config

    def check_over_configs(self, time_step=0, **config):
        """Save/reload round-trip must produce identical step outputs."""
        kwargs = dict(self.forward_default_kwargs)
        num_inference_steps = kwargs.pop("num_inference_steps", None)
        sample = self.dummy_sample
        residual = 0.1 * sample
        dummy_past_residuals = [residual + 0.2, residual + 0.15, residual + 0.10]

        for scheduler_class in self.scheduler_classes:
            scheduler_config = self.get_scheduler_config(**config)
            scheduler = scheduler_class(**scheduler_config)
            scheduler.set_timesteps(num_inference_steps)
            # copy over dummy past residuals
            scheduler.model_outputs = dummy_past_residuals[: scheduler.config.solver_order]

            with tempfile.TemporaryDirectory() as tmpdirname:
                scheduler.save_config(tmpdirname)
                new_scheduler = scheduler_class.from_pretrained(tmpdirname)
                new_scheduler.set_timesteps(num_inference_steps)
                # copy over dummy past residuals
                new_scheduler.model_outputs = dummy_past_residuals[: new_scheduler.config.solver_order]

            output, new_output = sample, sample
            for t in range(time_step, time_step + scheduler.config.solver_order + 1):
                output = scheduler.step(residual, t, output, **kwargs).prev_sample
                new_output = new_scheduler.step(residual, t, new_output, **kwargs).prev_sample

                assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"

    def test_from_save_pretrained(self):
        # covered by check_over_configs above
        pass

    def check_over_forward(self, time_step=0, **forward_kwargs):
        """Save/reload round-trip with forward kwargs must produce identical step outputs."""
        kwargs = dict(self.forward_default_kwargs)
        num_inference_steps = kwargs.pop("num_inference_steps", None)
        sample = self.dummy_sample
        residual = 0.1 * sample
        dummy_past_residuals = [residual + 0.2, residual + 0.15, residual + 0.10]

        for scheduler_class in self.scheduler_classes:
            scheduler_config = self.get_scheduler_config()
            scheduler = scheduler_class(**scheduler_config)
            scheduler.set_timesteps(num_inference_steps)

            # copy over dummy past residuals (must be after setting timesteps)
            scheduler.model_outputs = dummy_past_residuals[: scheduler.config.solver_order]

            with tempfile.TemporaryDirectory() as tmpdirname:
                scheduler.save_config(tmpdirname)
                new_scheduler = scheduler_class.from_pretrained(tmpdirname)
                # copy over dummy past residuals
                new_scheduler.set_timesteps(num_inference_steps)
                # copy over dummy past residual (must be after setting timesteps)
                new_scheduler.model_outputs = dummy_past_residuals[: new_scheduler.config.solver_order]

            output = scheduler.step(residual, time_step, sample, **kwargs).prev_sample
            new_output = new_scheduler.step(residual, time_step, sample, **kwargs).prev_sample

            assert torch.sum(torch.abs(output - new_output)) < 1e-5, "Scheduler outputs are not identical"

    def full_loop(self, scheduler=None, **config):
        """Run 10 denoising steps and return the final sample.

        Fix over the original: the passed-in `scheduler` was previously discarded
        by an unconditional re-construction, making the argument a no-op.
        """
        if scheduler is None:
            scheduler_class = self.scheduler_classes[0]
            scheduler_config = self.get_scheduler_config(**config)
            scheduler = scheduler_class(**scheduler_config)

        num_inference_steps = 10
        model = self.dummy_model()
        sample = self.dummy_sample_deter
        scheduler.set_timesteps(num_inference_steps)

        for i, t in enumerate(scheduler.timesteps):
            residual = model(sample, t)
            sample = scheduler.step(residual, t, sample).prev_sample

        return sample

    def test_step_shape(self):
        kwargs = dict(self.forward_default_kwargs)
        num_inference_steps = kwargs.pop("num_inference_steps", None)

        for scheduler_class in self.scheduler_classes:
            scheduler_config = self.get_scheduler_config()
            scheduler = scheduler_class(**scheduler_config)

            sample = self.dummy_sample
            residual = 0.1 * sample

            if num_inference_steps is not None and hasattr(scheduler, "set_timesteps"):
                scheduler.set_timesteps(num_inference_steps)
            elif num_inference_steps is not None and not hasattr(scheduler, "set_timesteps"):
                scheduler.num_inference_steps = num_inference_steps

            # copy over dummy past residuals (must be done after set_timesteps)
            dummy_past_residuals = [residual + 0.2, residual + 0.15, residual + 0.10]
            scheduler.model_outputs = dummy_past_residuals[: scheduler.config.solver_order]

            time_step_0 = scheduler.timesteps[5]
            time_step_1 = scheduler.timesteps[6]

            output_0 = scheduler.step(residual, time_step_0, sample, **kwargs).prev_sample
            output_1 = scheduler.step(residual, time_step_1, sample, **kwargs).prev_sample

            self.assertEqual(output_0.shape, sample.shape)
            self.assertEqual(output_0.shape, output_1.shape)

    def test_switch(self):
        # make sure that iterating over schedulers with same config names gives same results
        # for defaults
        scheduler = DEISMultistepScheduler(**self.get_scheduler_config())
        sample = self.full_loop(scheduler=scheduler)
        result_mean = torch.mean(torch.abs(sample))
        assert abs(result_mean.item() - 0.23916) < 1e-3

        scheduler = DPMSolverSinglestepScheduler.from_config(scheduler.config)
        scheduler = DPMSolverMultistepScheduler.from_config(scheduler.config)
        scheduler = UniPCMultistepScheduler.from_config(scheduler.config)
        scheduler = DEISMultistepScheduler.from_config(scheduler.config)

        sample = self.full_loop(scheduler=scheduler)
        result_mean = torch.mean(torch.abs(sample))
        assert abs(result_mean.item() - 0.23916) < 1e-3

    def test_timesteps(self):
        for timesteps in [25, 50, 100, 999, 1000]:
            self.check_over_configs(num_train_timesteps=timesteps)

    def test_thresholding(self):
        self.check_over_configs(thresholding=False)
        for order in [1, 2, 3]:
            for solver_type in ["logrho"]:
                for threshold in [0.5, 1.0, 2.0]:
                    for prediction_type in ["epsilon", "sample"]:
                        self.check_over_configs(
                            thresholding=True,
                            prediction_type=prediction_type,
                            sample_max_value=threshold,
                            algorithm_type="deis",
                            solver_order=order,
                            solver_type=solver_type,
                        )

    def test_prediction_type(self):
        for prediction_type in ["epsilon", "v_prediction"]:
            self.check_over_configs(prediction_type=prediction_type)

    def test_solver_order_and_type(self):
        for algorithm_type in ["deis"]:
            for solver_type in ["logrho"]:
                for order in [1, 2, 3]:
                    for prediction_type in ["epsilon", "sample"]:
                        self.check_over_configs(
                            solver_order=order,
                            solver_type=solver_type,
                            prediction_type=prediction_type,
                            algorithm_type=algorithm_type,
                        )
                        sample = self.full_loop(
                            solver_order=order,
                            solver_type=solver_type,
                            prediction_type=prediction_type,
                            algorithm_type=algorithm_type,
                        )
                        assert not torch.isnan(sample).any(), "Samples have nan numbers"

    def test_lower_order_final(self):
        self.check_over_configs(lower_order_final=True)
        self.check_over_configs(lower_order_final=False)

    def test_inference_steps(self):
        for num_inference_steps in [1, 2, 3, 5, 10, 50, 100, 999, 1000]:
            self.check_over_forward(num_inference_steps=num_inference_steps, time_step=0)

    def test_full_loop_no_noise(self):
        sample = self.full_loop()
        result_mean = torch.mean(torch.abs(sample))
        assert abs(result_mean.item() - 0.23916) < 1e-3

    def test_full_loop_with_v_prediction(self):
        sample = self.full_loop(prediction_type="v_prediction")
        result_mean = torch.mean(torch.abs(sample))
        assert abs(result_mean.item() - 0.091) < 1e-3

    def test_fp16_support(self):
        scheduler_class = self.scheduler_classes[0]
        scheduler_config = self.get_scheduler_config(thresholding=True, dynamic_thresholding_ratio=0)
        scheduler = scheduler_class(**scheduler_config)

        num_inference_steps = 10
        model = self.dummy_model()
        sample = self.dummy_sample_deter.half()
        scheduler.set_timesteps(num_inference_steps)

        for i, t in enumerate(scheduler.timesteps):
            residual = model(sample, t)
            sample = scheduler.step(residual, t, sample).prev_sample

        # the half-precision sample must stay fp16 through the loop
        assert sample.dtype == torch.float16
693
def lucas_lehmer_test(p: int) -> bool:
    """Lucas-Lehmer primality test for the Mersenne number 2**p - 1.

    Args:
        p: exponent of the Mersenne number to test (must be >= 2).

    Returns:
        True iff 2**p - 1 is prime.

    Raises:
        ValueError: if ``p`` is less than 2.
    """
    if p < 2:
        raise ValueError("""p should not be less than 2!""")
    if p == 2:
        return True  # 2**2 - 1 == 3 is prime; the loop below requires p > 2.
    s = 4
    m = (1 << p) - 1  # the Mersenne number 2**p - 1
    for _ in range(p - 2):
        s = ((s * s) - 2) % m
    return s == 0


# Backward-compatible alias for the previous (generated) name.
_UpperCAmelCase = lucas_lehmer_test

if __name__ == "__main__":
    print(lucas_lehmer_test(7))
    print(lucas_lehmer_test(11))
693
1
from pathlib import Path

import cv2  # NOTE(review): source had mangled `import cva`; cv2 matches the API used — confirm
import numpy as np
from matplotlib import pyplot as plt


def get_rotation(img, pt1, pt2, rows, cols):
    """Warp ``img`` with the affine transform that maps triangle ``pt1`` onto ``pt2``.

    Args:
        img: source image array.
        pt1: 3x2 float32 array of source points.
        pt2: 3x2 float32 array of destination points.
        rows, cols: output image size passed to ``cv2.warpAffine``.

    Returns:
        The warped image.
    """
    matrix = cv2.getAffineTransform(pt1, pt2)
    return cv2.warpAffine(img, matrix, (rows, cols))


# Backward-compatible alias for the previous (generated) name.
_UpperCAmelCase = get_rotation

if __name__ == "__main__":
    # read original image
    image = cv2.imread(
        str(Path(__file__).resolve().parent.parent / "image_data" / "lena.jpg")
    )
    # turn image in gray scale value
    gray_img = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    # get image shape
    img_rows, img_cols = gray_img.shape
    # set different points to rotate image
    # NOTE(review): the four point sets and their pairing below were reconstructed
    # from an identifier-mangled source (all were named `ptsa`) — verify pairing.
    pts1 = np.array([[50, 50], [200, 50], [50, 200]], np.float32)
    pts2 = np.array([[10, 100], [200, 50], [100, 250]], np.float32)
    pts3 = np.array([[50, 50], [150, 50], [120, 200]], np.float32)
    pts4 = np.array([[10, 100], [80, 50], [180, 250]], np.float32)
    # add all rotated images in a list
    images = [
        gray_img,
        get_rotation(gray_img, pts1, pts2, img_rows, img_cols),
        get_rotation(gray_img, pts2, pts3, img_rows, img_cols),
        get_rotation(gray_img, pts3, pts4, img_rows, img_cols),
    ]
    # plot different image rotations
    fig = plt.figure(1)
    titles = ["Original", "Rotation 1", "Rotation 2", "Rotation 3"]
    for i, image in enumerate(images):
        plt.subplot(2, 2, i + 1), plt.imshow(image, "gray")
        plt.title(titles[i])
        plt.axis("off")
    plt.subplots_adjust(left=0.0, bottom=0.05, right=1.0, top=0.95)
    plt.show()
693
import random
import unittest

import torch

from diffusers import IFImgaImgSuperResolutionPipeline
from diffusers.utils import floats_tensor
from diffusers.utils.import_utils import is_xformers_available
from diffusers.utils.testing_utils import skip_mps, torch_device

from ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS
from ..test_pipelines_common import PipelineTesterMixin
from . import IFPipelineTesterMixin


@skip_mps
class _a(PipelineTesterMixin, IFPipelineTesterMixin, unittest.TestCase):
    """Fast tests for `IFImgaImgSuperResolutionPipeline`.

    The two mixin base classes were previously the undefined name `snake_case_`;
    they are restored from the imports above, which were otherwise unused.
    """

    pipeline_class = IFImgaImgSuperResolutionPipeline
    params = TEXT_GUIDED_IMAGE_VARIATION_PARAMS - {"width", "height"}
    batch_params = TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS.union({"original_image"})
    required_optional_params = PipelineTesterMixin.required_optional_params - {"latents"}

    def get_dummy_components(self):
        # Delegates to the IF mixin's shared super-resolution component factory.
        return self._get_superresolution_dummy_components()

    def get_dummy_inputs(self, device, seed=0):
        # MPS does not support device-bound generators, so fall back to the global one.
        if str(device).startswith("mps"):
            generator = torch.manual_seed(seed)
        else:
            generator = torch.Generator(device=device).manual_seed(seed)

        # NOTE(review): which tensor is `image` vs `original_image` was reconstructed
        # from assignment order in the mangled source — confirm against upstream.
        image = floats_tensor((1, 3, 32, 32), rng=random.Random(seed)).to(device)
        original_image = floats_tensor((1, 3, 16, 16), rng=random.Random(seed)).to(device)

        inputs = {
            """prompt""": """A painting of a squirrel eating a burger""",
            """image""": image,
            """original_image""": original_image,
            """generator""": generator,
            """num_inference_steps""": 2,
            """output_type""": """numpy""",
        }
        return inputs

    @unittest.skipIf(
        torch_device != """cuda""" or not is_xformers_available(),
        reason="""XFormers attention is only available with CUDA and `xformers` installed""",
    )
    def test_xformers_attention_forwardGenerator_pass(self):
        self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3)

    def test_save_load_optional_components(self):
        self._test_save_load_optional_components()

    @unittest.skipIf(torch_device != """cuda""", reason="""float16 requires CUDA""")
    def test_save_load_floataa(self):
        # Due to non-determinism in save load of the hf-internal-testing/tiny-random-t5 text encoder
        # NOTE(review): name kept matching the parent method called below (likely a
        # mangled `test_save_load_float16`) — confirm against the mixin.
        super().test_save_load_floataa(expected_max_diff=1e-1)

    def test_attention_slicing_forward_pass(self):
        self._test_attention_slicing_forward_pass(expected_max_diff=1e-2)

    def test_save_load_local(self):
        self._test_save_load_local()

    def test_inference_batch_single_identical(self):
        self._test_inference_batch_single_identical(
            expected_max_diff=1e-2,
        )
693
1
from typing import List, Optional, Tuple, Union

import torch

from ...models import UNetaDModel
from ...schedulers import ScoreSdeVeScheduler
from ...utils import randn_tensor
from ..pipeline_utils import DiffusionPipeline, ImagePipelineOutput


class _a(DiffusionPipeline):
    """Unconditional image-generation pipeline driven by a score-based
    variance-exploding SDE scheduler (predictor-corrector sampling).

    The base class was previously the undefined name `snake_case_`; it is
    restored to the imported `DiffusionPipeline`, whose `register_modules`,
    `progress_bar` and `numpy_to_pil` helpers are used below.
    """

    # NOTE(review): `UNetaDModel` looks like a mangled `UNet2DModel`; the import
    # name is kept as-is here — confirm against the package's models module.
    unet: UNetaDModel
    scheduler: ScoreSdeVeScheduler

    def __init__(self, unet, scheduler) -> None:
        # Parameter names restored from the keyword arguments below; the source
        # had two parameters both named `lowercase_`, which is a SyntaxError.
        super().__init__()
        self.register_modules(unet=unet, scheduler=scheduler)

    @torch.no_grad()
    def __call__(
        self,
        batch_size=1,
        num_inference_steps=2000,
        generator=None,
        output_type="pil",
        return_dict=True,
        **kwargs,
    ) -> Union[ImagePipelineOutput, Tuple]:
        """Generate `batch_size` images.

        Returns an `ImagePipelineOutput` (or a 1-tuple when `return_dict` is
        False) containing the decoded images.
        """
        img_size = self.unet.config.sample_size
        shape = (batch_size, 3, img_size, img_size)
        model = self.unet

        # Start from pure noise scaled by the scheduler's initial sigma.
        sample = randn_tensor(shape, generator=generator) * self.scheduler.init_noise_sigma
        sample = sample.to(self.device)

        self.scheduler.set_timesteps(num_inference_steps)
        self.scheduler.set_sigmas(num_inference_steps)

        for i, t in enumerate(self.progress_bar(self.scheduler.timesteps)):
            sigma_t = self.scheduler.sigmas[i] * torch.ones(shape[0], device=self.device)

            # correction step
            for _ in range(self.scheduler.config.correct_steps):
                model_output = self.unet(sample, sigma_t).sample
                sample = self.scheduler.step_correct(model_output, sample, generator=generator).prev_sample

            # prediction step
            model_output = model(sample, sigma_t).sample
            output = self.scheduler.step_pred(model_output, t, sample, generator=generator)
            sample, sample_mean = output.prev_sample, output.prev_sample_mean

        # The mean of the final step is the returned (denoised) sample.
        sample = sample_mean.clamp(0, 1)
        sample = sample.cpu().permute(0, 2, 3, 1).numpy()
        if output_type == "pil":
            sample = self.numpy_to_pil(sample)

        if not return_dict:
            return (sample,)

        return ImagePipelineOutput(images=sample)
693
from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP = {}


class _a(PretrainedConfig):
    """Configuration for LLaMA models.

    Stores the hyper-parameters below and delegates the token-id / embedding
    tying options to `PretrainedConfig.__init__`. Parameter names were
    reconstructed from the right-hand sides of the original assignments (the
    mangled source declared every parameter as `lowercase_`, a SyntaxError).
    """

    model_type = "llama"
    keys_to_ignore_at_inference = ["past_key_values"]

    def __init__(
        self,
        vocab_size=32000,
        hidden_size=4096,
        intermediate_size=11008,
        num_hidden_layers=32,
        num_attention_heads=32,
        num_key_value_heads=None,
        hidden_act="silu",
        max_position_embeddings=2048,
        initializer_range=0.02,
        rms_norm_eps=1e-6,
        use_cache=True,
        pad_token_id=0,
        bos_token_id=1,
        eos_token_id=2,
        pretraining_tp=1,
        tie_word_embeddings=False,
        rope_scaling=None,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads

        # for backward compatibility
        if num_key_value_heads is None:
            num_key_value_heads = num_attention_heads

        self.num_key_value_heads = num_key_value_heads
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.rms_norm_eps = rms_norm_eps
        self.pretraining_tp = pretraining_tp
        self.use_cache = use_cache
        self.rope_scaling = rope_scaling
        self._rope_scaling_validation()

        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )

    def _rope_scaling_validation(self):
        """Validate the `rope_scaling` dict: {"type": "linear"|"dynamic", "factor": float > 1}."""
        if self.rope_scaling is None:
            return

        if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
            # Message fixed to name the `type` field the code actually checks
            # (it previously said `name`, which the code never reads).
            raise ValueError(
                "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, "
                f"""got {self.rope_scaling}"""
            )
        rope_scaling_type = self.rope_scaling.get("""type""", None)
        rope_scaling_factor = self.rope_scaling.get("""factor""", None)
        if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic"]:
            raise ValueError(
                f"""`rope_scaling`'s type field must be one of ['linear', 'dynamic'], got {rope_scaling_type}"""
            )
        if rope_scaling_factor is None or not isinstance(rope_scaling_factor, float) or rope_scaling_factor <= 1.0:
            raise ValueError(f"""`rope_scaling`'s factor field must be an float > 1, got {rope_scaling_factor}""")
693
1
'''simple docstring''' from __future__ import annotations from dataclasses import dataclass @dataclass class _a : _UpperCamelCase: Tuple = 42 _UpperCamelCase: int = None _UpperCamelCase: Dict = None def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' def is_valid_tree(SCREAMING_SNAKE_CASE__ ) -> bool: if node is None: return True if not isinstance(SCREAMING_SNAKE_CASE_ ,SCREAMING_SNAKE_CASE_ ): return False try: float(node.data ) except (TypeError, ValueError): return False return is_valid_tree(node.left ) and is_valid_tree(node.right ) if not is_valid_tree(SCREAMING_SNAKE_CASE_ ): raise ValueError( """Each node should be type of TreeNode and data should be float.""" ) def is_binary_search_tree_recursive_check( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ) -> bool: if node is None: return True return ( left_bound < node.data < right_bound and is_binary_search_tree_recursive_check(node.left ,SCREAMING_SNAKE_CASE_ ,node.data ) and is_binary_search_tree_recursive_check( node.right ,node.data ,SCREAMING_SNAKE_CASE_ ) ) return is_binary_search_tree_recursive_check(SCREAMING_SNAKE_CASE_ ,-float("""inf""" ) ,float("""inf""" ) ) if __name__ == "__main__": import doctest doctest.testmod()
700
from collections import OrderedDict
from typing import Mapping

from packaging import version

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices


logger = logging.get_logger(__name__)

SWIN_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "microsoft/swin-tiny-patch4-window7-224": (
        "https://huggingface.co/microsoft/swin-tiny-patch4-window7-224/resolve/main/config.json"
    ),
    # See all Swin models at https://huggingface.co/models?filter=swin
}


class SwinConfig(BackboneConfigMixin, PretrainedConfig):
    """Configuration for Swin Transformer models.

    Both classes in this module were previously named `_a` (the second shadowed
    the first) with undefined `snake_case_` bases; names and bases restored
    from the imports, which were otherwise unused.
    """

    model_type = "swin"

    attribute_map = {
        "num_attention_heads": "num_heads",
        "num_hidden_layers": "num_layers",
    }

    def __init__(
        self,
        image_size=224,
        patch_size=4,
        num_channels=3,
        embed_dim=96,
        depths=[2, 2, 6, 2],
        num_heads=[3, 6, 12, 24],
        window_size=7,
        mlp_ratio=4.0,
        qkv_bias=True,
        hidden_dropout_prob=0.0,
        attention_probs_dropout_prob=0.0,
        drop_path_rate=0.1,
        hidden_act="gelu",
        use_absolute_embeddings=False,
        initializer_range=0.02,
        layer_norm_eps=1e-5,
        encoder_stride=32,
        out_features=None,
        out_indices=None,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.embed_dim = embed_dim
        self.depths = depths
        self.num_layers = len(depths)
        self.num_heads = num_heads
        self.window_size = window_size
        self.mlp_ratio = mlp_ratio
        self.qkv_bias = qkv_bias
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.drop_path_rate = drop_path_rate
        self.hidden_act = hidden_act
        self.use_absolute_embeddings = use_absolute_embeddings
        self.layer_norm_eps = layer_norm_eps
        self.initializer_range = initializer_range
        self.encoder_stride = encoder_stride
        # we set the hidden_size attribute in order to make Swin work with VisionEncoderDecoderModel
        # this indicates the channel dimension after the last stage of the model
        self.hidden_size = int(embed_dim * 2 ** (len(depths) - 1))
        self.stage_names = ["""stem"""] + [f"""stage{idx}""" for idx in range(1, len(depths) + 1)]
        self._out_features, self._out_indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=out_indices, stage_names=self.stage_names
        )


class SwinOnnxConfig(OnnxConfig):
    """ONNX export configuration for Swin."""

    torch_onnx_minimum_version = version.parse("1.11")

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        # Single NCHW pixel input with fully dynamic axes.
        return OrderedDict(
            [
                ("""pixel_values""", {0: """batch""", 1: """num_channels""", 2: """height""", 3: """width"""}),
            ]
        )

    @property
    def atol_for_validation(self) -> float:
        return 1e-4
693
0
from maths.is_square_free import is_square_free
from maths.prime_factors import prime_factors


def _UpperCAmelCase(SCREAMING_SNAKE_CASE__):
    """Möbius function mu(n).

    Returns:
        1 if the argument is square-free with an even number of prime factors,
        -1 if square-free with an odd number of prime factors,
        0 if it has a squared prime factor.
    """
    factors = prime_factors(SCREAMING_SNAKE_CASE__)
    if is_square_free(SCREAMING_SNAKE_CASE__):
        # Parity of the number of prime factors decides the sign.
        return -1 if len(factors) % 2 else 1
    return 0


if __name__ == "__main__":
    import doctest

    doctest.testmod()
701
# Pinned/bounded versions for every optional and required dependency,
# keyed by distribution name. Values are full pip requirement specifiers.
lowerCAmelCase: dict = {
    "Pillow": "Pillow<10.0.0",
    "accelerate": "accelerate>=0.20.3",
    "av": "av==9.2.0",
    "beautifulsoup4": "beautifulsoup4",
    "black": "black~=23.1",
    "codecarbon": "codecarbon==1.2.0",
    "cookiecutter": "cookiecutter==1.7.3",
    "dataclasses": "dataclasses",
    "datasets": "datasets!=2.5.0",
    "decord": "decord==0.6.0",
    "deepspeed": "deepspeed>=0.9.3",
    "diffusers": "diffusers",
    "dill": "dill<0.3.5",
    "evaluate": "evaluate>=0.2.0",
    "fairscale": "fairscale>0.3",
    "faiss-cpu": "faiss-cpu",
    "fastapi": "fastapi",
    "filelock": "filelock",
    "flax": "flax>=0.4.1,<=0.7.0",
    "ftfy": "ftfy",
    "fugashi": "fugashi>=1.0",
    "GitPython": "GitPython<3.1.19",
    "hf-doc-builder": "hf-doc-builder>=0.3.0",
    "huggingface-hub": "huggingface-hub>=0.14.1,<1.0",
    "importlib_metadata": "importlib_metadata",
    "ipadic": "ipadic>=1.0.0,<2.0",
    "isort": "isort>=5.5.4",
    "jax": "jax>=0.2.8,!=0.3.2,<=0.4.13",
    "jaxlib": "jaxlib>=0.1.65,<=0.4.13",
    "jieba": "jieba",
    "kenlm": "kenlm",
    "keras-nlp": "keras-nlp>=0.3.1",
    "librosa": "librosa",
    "nltk": "nltk",
    "natten": "natten>=0.14.6",
    "numpy": "numpy>=1.17",
    "onnxconverter-common": "onnxconverter-common",
    "onnxruntime-tools": "onnxruntime-tools>=1.4.2",
    "onnxruntime": "onnxruntime>=1.4.0",
    "opencv-python": "opencv-python",
    "optuna": "optuna",
    "optax": "optax>=0.0.8,<=0.1.4",
    "packaging": "packaging>=20.0",
    "parameterized": "parameterized",
    "phonemizer": "phonemizer",
    "protobuf": "protobuf",
    "psutil": "psutil",
    "pyyaml": "pyyaml>=5.1",
    "pydantic": "pydantic<2",
    "pytest": "pytest>=7.2.0",
    "pytest-timeout": "pytest-timeout",
    "pytest-xdist": "pytest-xdist",
    "python": "python>=3.8.0",
    "ray[tune]": "ray[tune]",
    "regex": "regex!=2019.12.17",
    "requests": "requests",
    "rhoknp": "rhoknp>=1.1.0,<1.3.1",
    "rjieba": "rjieba",
    "rouge-score": "rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1",
    "ruff": "ruff>=0.0.241,<=0.0.259",
    "sacrebleu": "sacrebleu>=1.4.12,<2.0.0",
    "sacremoses": "sacremoses",
    "safetensors": "safetensors>=0.3.1",
    "sagemaker": "sagemaker>=2.31.0",
    "scikit-learn": "scikit-learn",
    "sentencepiece": "sentencepiece>=0.1.91,!=0.1.92",
    "sigopt": "sigopt",
    "starlette": "starlette",
    "sudachipy": "sudachipy>=0.6.6",
    "sudachidict_core": "sudachidict_core>=20220729",
    "tensorflow-cpu": "tensorflow-cpu>=2.6,<2.14",
    "tensorflow": "tensorflow>=2.6,<2.14",
    "tensorflow-text": "tensorflow-text<2.14",
    "tf2onnx": "tf2onnx",
    "timeout-decorator": "timeout-decorator",
    "timm": "timm",
    "tokenizers": "tokenizers>=0.11.1,!=0.11.3,<0.14",
    "torch": "torch>=1.9,!=1.12.0",
    "torchaudio": "torchaudio",
    "torchvision": "torchvision",
    "pyctcdecode": "pyctcdecode>=0.4.0",
    "tqdm": "tqdm>=4.27",
    "unidic": "unidic>=1.0.2",
    "unidic_lite": "unidic_lite>=1.0.7",
    "urllib3": "urllib3<2.0.0",
    "uvicorn": "uvicorn",
}
693
0
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' if index == number_of_items: return 0 lowerCAmelCase : Tuple = 0 lowerCAmelCase : int = 0 lowerCAmelCase : List[Any] = knapsack(_SCREAMING_SNAKE_CASE ,_SCREAMING_SNAKE_CASE ,_SCREAMING_SNAKE_CASE ,_SCREAMING_SNAKE_CASE ,index + 1 ) if weights[index] <= max_weight: lowerCAmelCase : Optional[int] = values[index] + knapsack( _SCREAMING_SNAKE_CASE ,_SCREAMING_SNAKE_CASE ,_SCREAMING_SNAKE_CASE ,max_weight - weights[index] ,index + 1 ) return max(_SCREAMING_SNAKE_CASE ,_SCREAMING_SNAKE_CASE ) if __name__ == "__main__": import doctest doctest.testmod()
702
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_flax_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


# Map of submodule name -> public names, consumed by _LazyModule below.
# (The mangled source assigned this dict and the conditional lists to unrelated
# throwaway names, so `_import_structure` was never actually populated.)
_import_structure = {
    "configuration_roformer": ["ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "RoFormerConfig", "RoFormerOnnxConfig"],
    "tokenization_roformer": ["RoFormerTokenizer"],
}

try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_roformer_fast"] = ["RoFormerTokenizerFast"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_roformer"] = [
        "ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "RoFormerForCausalLM",
        "RoFormerForMaskedLM",
        "RoFormerForMultipleChoice",
        "RoFormerForQuestionAnswering",
        "RoFormerForSequenceClassification",
        "RoFormerForTokenClassification",
        "RoFormerLayer",
        "RoFormerModel",
        "RoFormerPreTrainedModel",
        "load_tf_weights_in_roformer",
    ]

try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_tf_roformer"] = [
        "TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFRoFormerForCausalLM",
        "TFRoFormerForMaskedLM",
        "TFRoFormerForMultipleChoice",
        "TFRoFormerForQuestionAnswering",
        "TFRoFormerForSequenceClassification",
        "TFRoFormerForTokenClassification",
        "TFRoFormerLayer",
        "TFRoFormerModel",
        "TFRoFormerPreTrainedModel",
    ]

try:
    if not is_flax_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_flax_roformer"] = [
        "FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "FlaxRoFormerForMaskedLM",
        "FlaxRoFormerForMultipleChoice",
        "FlaxRoFormerForQuestionAnswering",
        "FlaxRoFormerForSequenceClassification",
        "FlaxRoFormerForTokenClassification",
        "FlaxRoFormerModel",
        "FlaxRoFormerPreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, RoFormerConfig, RoFormerOnnxConfig
    from .tokenization_roformer import RoFormerTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_roformer_fast import RoFormerTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_roformer import (
            ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            RoFormerForCausalLM,
            RoFormerForMaskedLM,
            RoFormerForMultipleChoice,
            RoFormerForQuestionAnswering,
            RoFormerForSequenceClassification,
            RoFormerForTokenClassification,
            RoFormerLayer,
            RoFormerModel,
            RoFormerPreTrainedModel,
            load_tf_weights_in_roformer,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_roformer import (
            TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFRoFormerForCausalLM,
            TFRoFormerForMaskedLM,
            TFRoFormerForMultipleChoice,
            TFRoFormerForQuestionAnswering,
            TFRoFormerForSequenceClassification,
            TFRoFormerForTokenClassification,
            TFRoFormerLayer,
            TFRoFormerModel,
            TFRoFormerPreTrainedModel,
        )

    try:
        if not is_flax_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_flax_roformer import (
            FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            FlaxRoFormerForMaskedLM,
            FlaxRoFormerForMultipleChoice,
            FlaxRoFormerForQuestionAnswering,
            FlaxRoFormerForSequenceClassification,
            FlaxRoFormerForTokenClassification,
            FlaxRoFormerModel,
            FlaxRoFormerPreTrainedModel,
        )

else:
    import sys

    # Install the lazy module so attribute access triggers the real imports.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
693
0
# Generated protobuf module for sentencepiece_model.proto.
# NOTE(review): the mangled source assigned DESCRIPTOR, _globals and the
# _serialized_start/_end targets to throwaway names; they are restored here to
# the standard protoc-generated form. The serialized-offset target names were
# reconstructed from the messages defined in the serialized proto
# (TrainerSpec/ModelType, NormalizerSpec, SelfTestData/Sample,
# ModelProto/SentencePiece/Type) — verify against a fresh protoc run.
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder

# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
    b'\n\x19sentencepiece_model.proto\x12\rsentencepiece\"\x80\x0c\n\x0bTrainerSpec\x12\r\n\x05input\x18\x01 \x03(\t\x12\x14\n\x0cinput_format\x18\x07 \x01(\t\x12\x14\n\x0cmodel_prefix\x18\x02 \x01(\t\x12\x41\n\nmodel_type\x18\x03 \x01(\x0e\x32$.sentencepiece.TrainerSpec.ModelType:\x07UNIGRAM\x12\x18\n\nvocab_size\x18\x04 \x01(\x05:\x04\x38\x30\x30\x30\x12\x17\n\x0f\x61\x63\x63\x65pt_language\x18\x05 \x03(\t\x12 \n\x15self_test_sample_size\x18\x06 \x01(\x05:\x01\x30\x12*\n\x1b\x65nable_differential_privacy\x18\x32 \x01(\x08:\x05\x66\x61lse\x12+\n differential_privacy_noise_level\x18\x33 \x01(\x02:\x01\x30\x12\x32\n\'differential_privacy_clipping_threshold\x18\x34 \x01(\x04:\x01\x30\x12\"\n\x12\x63haracter_coverage\x18\n \x01(\x02:\x06\x30.9995\x12\x1e\n\x13input_sentence_size\x18\x0b \x01(\x04:\x01\x30\x12$\n\x16shuffle_input_sentence\x18\x13 \x01(\x08:\x04true\x12 \n\x14mining_sentence_size\x18\x0c \x01(\x05\x42\x02\x18\x01\x12\"\n\x16training_sentence_size\x18\r \x01(\x05\x42\x02\x18\x01\x12(\n\x17seed_sentencepiece_size\x18\x0e \x01(\x05:\x07\x31\x30\x30\x30\x30\x30\x30\x12\x1e\n\x10shrinking_factor\x18\x0f \x01(\x02:\x04\x30.75\x12!\n\x13max_sentence_length\x18\x12 \x01(\x05:\x04\x34\x31\x39\x32\x12\x17\n\x0bnum_threads\x18\x10 \x01(\x05:\x02\x31\x36\x12\x1d\n\x12num_sub_iterations\x18\x11 \x01(\x05:\x01\x32\x12$\n\x18max_sentencepiece_length\x18\x14 \x01(\x05:\x02\x31\x36\x12%\n\x17split_by_unicode_script\x18\x15 \x01(\x08:\x04true\x12\x1d\n\x0fsplit_by_number\x18\x17 \x01(\x08:\x04true\x12!\n\x13split_by_whitespace\x18\x16 \x01(\x08:\x04true\x12)\n\x1atreat_whitespace_as_suffix\x18\x18 \x01(\x08:\x05\x66\x61lse\x12+\n\x1c\x61llow_whitespace_only_pieces\x18\x1a \x01(\x08:\x05\x66\x61lse\x12\x1b\n\x0csplit_digits\x18\x19 \x01(\x08:\x05\x66\x61lse\x12#\n\x19pretokenization_delimiter\x18\x35 \x01(\t:\x00\x12\x17\n\x0f\x63ontrol_symbols\x18\x1e \x03(\t\x12\x1c\n\x14user_defined_symbols\x18\x1f \x03(\t\x12\x16\n\x0erequired_chars\x18$ \x01(\t\x12\x1c\n\rbyte_fallback\x18# \x01(\x08:\x05\x66\x61lse\x12+\n\x1dvocabulary_output_piece_score\x18 \x01(\x08:\x04true\x12\x1e\n\x10hard_vocab_limit\x18! \x01(\x08:\x04true\x12\x1c\n\ruse_all_vocab\x18\" \x01(\x08:\x05\x66\x61lse\x12\x11\n\x06unk_id\x18( \x01(\x05:\x01\x30\x12\x11\n\x06\x62os_id\x18) \x01(\x05:\x01\x31\x12\x11\n\x06\x65os_id\x18* \x01(\x05:\x01\x32\x12\x12\n\x06pad_id\x18+ \x01(\x05:\x02-1\x12\x18\n\tunk_piece\x18- \x01(\t:\x05<unk>\x12\x16\n\tbos_piece\x18. \x01(\t:\x03<s>\x12\x17\n\teos_piece\x18/ \x01(\t:\x04</s>\x12\x18\n\tpad_piece\x18\x30 \x01(\t:\x05<pad>\x12\x1a\n\x0bunk_surface\x18, \x01(\t:\x05 \xe2\x81\x87 \x12+\n\x1ctrain_extremely_large_corpus\x18\x31 \x01(\x08:\x05\x66\x61lse\"5\n\tModelType\x12\x0b\n\x07UNIGRAM\x10\x01\x12\x07\n\x03\x42PE\x10\x02\x12\x08\n\x04WORD\x10\x03\x12\x08\n\x04\x43HAR\x10\x04*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02\"\xd1\x01\n\x0eNormalizerSpec\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x1c\n\x14precompiled_charsmap\x18\x02 \x01(\x0c\x12\x1e\n\x10\x61\x64\x64_dummy_prefix\x18\x03 \x01(\x08:\x04true\x12&\n\x18remove_extra_whitespaces\x18\x04 \x01(\x08:\x04true\x12 \n\x12\x65scape_whitespaces\x18\x05 \x01(\x08:\x04true\x12\x1e\n\x16normalization_rule_tsv\x18\x06 \x01(\t*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02\"y\n\x0cSelfTestData\x12\x33\n\x07samples\x18\x01 \x03(\x0b\x32\".sentencepiece.SelfTestData.Sample\x1a)\n\x06Sample\x12\r\n\x05input\x18\x01 \x01(\t\x12\x10\n\x08\x65xpected\x18\x02 \x01(\t*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02\"\xfe\x03\n\nModelProto\x12\x37\n\x06pieces\x18\x01 \x03(\x0b\x32\'.sentencepiece.ModelProto.SentencePiece\x12\x30\n\x0ctrainer_spec\x18\x02 \x01(\x0b\x32\x1a.sentencepiece.TrainerSpec\x12\x36\n\x0fnormalizer_spec\x18\x03 \x01(\x0b\x32\x1d.sentencepiece.NormalizerSpec\x12\x33\n\x0eself_test_data\x18\x04 \x01(\x0b\x32\x1b.sentencepiece.SelfTestData\x12\x38\n\x11\x64\x65normalizer_spec\x18\x05 \x01(\x0b\x32\x1d.sentencepiece.NormalizerSpec\x1a\xd2\x01\n\rSentencePiece\x12\r\n\x05piece\x18\x01 \x01(\t\x12\r\n\x05score\x18\x02 \x01(\x02\x12\x42\n\x04type\x18\x03 \x01(\x0e\x32,.sentencepiece.ModelProto.SentencePiece.Type:\x06NORMAL\"T\n\x04Type\x12\n\n\x06NORMAL\x10\x01\x12\x0b\n\x07UNKNOWN\x10\x02\x12\x0b\n\x07\x43ONTROL\x10\x03\x12\x10\n\x0cUSER_DEFINED\x10\x04\x12\x08\n\x04\x42YTE\x10\x06\x12\n\n\x06UNUSED\x10\x05*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02*\t\x08\xc8\x01\x10\x80\x80\x80\x80\x02\x42\x02H\x03'
)

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'sentencepiece_model_pb2', _globals)
if _descriptor._USE_C_DESCRIPTORS is False:
    DESCRIPTOR._options = None
    DESCRIPTOR._serialized_options = b"H\003"
    # (generated by protobuf compiler, but `_TRAINERSPEC` is not defined)
    # _TRAINERSPEC.fields_by_name["mining_sentence_size"]._options = None
    # _TRAINERSPEC.fields_by_name["mining_sentence_size"]._serialized_options = b"\030\001"
    # _TRAINERSPEC.fields_by_name["training_sentence_size"]._options = None
    # _TRAINERSPEC.fields_by_name["training_sentence_size"]._serialized_options = b"\030\001"
    _globals["_TRAINERSPEC"]._serialized_start = 45
    _globals["_TRAINERSPEC"]._serialized_end = 1_581
    _globals["_TRAINERSPEC_MODELTYPE"]._serialized_start = 1_517
    _globals["_TRAINERSPEC_MODELTYPE"]._serialized_end = 1_570
    _globals["_NORMALIZERSPEC"]._serialized_start = 1_584
    _globals["_NORMALIZERSPEC"]._serialized_end = 1_793
    _globals["_SELFTESTDATA"]._serialized_start = 1_795
    _globals["_SELFTESTDATA"]._serialized_end = 1_916
    _globals["_SELFTESTDATA_SAMPLE"]._serialized_start = 1_864
    _globals["_SELFTESTDATA_SAMPLE"]._serialized_end = 1_905
    _globals["_MODELPROTO"]._serialized_start = 1_919
    _globals["_MODELPROTO"]._serialized_end = 2_429
    _globals["_MODELPROTO_SENTENCEPIECE"]._serialized_start = 2_208
    _globals["_MODELPROTO_SENTENCEPIECE"]._serialized_end = 2_418
    _globals["_MODELPROTO_SENTENCEPIECE_TYPE"]._serialized_start = 2_323
    _globals["_MODELPROTO_SENTENCEPIECE_TYPE"]._serialized_end = 2_407
# @@protoc_insertion_point(module_scope)
703
def nor_gate(input_1, input_2):
    """Logical NOR: returns 1 only when both inputs are 0, else 0."""
    return int(input_1 == input_2 == 0)


def main():
    """Print the NOR truth table."""
    print("""Truth Table of NOR Gate:""")
    print("""| Input 1 | Input 2 | Output |""")
    print(f"""| 0 | 0 | {nor_gate(0, 0)} |""")
    print(f"""| 0 | 1 | {nor_gate(0, 1)} |""")
    print(f"""| 1 | 0 | {nor_gate(1, 0)} |""")
    print(f"""| 1 | 1 | {nor_gate(1, 1)} |""")


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    main()
693
0
from dataclasses import dataclass
from typing import Dict, Optional, Tuple, Union

import torch
import torch.nn as nn

from ..configuration_utils import ConfigMixin, register_to_config
from ..utils import BaseOutput, apply_forward_hook
from .attention_processor import AttentionProcessor, AttnProcessor
from .modeling_utils import ModelMixin
from .vae import Decoder, DecoderOutput, DiagonalGaussianDistribution, Encoder


@dataclass
class AutoencoderKLOutput(BaseOutput):
    """Output of `encode`.

    Attributes:
        latent_dist: diagonal Gaussian posterior over the latents; call
            `.sample()` or `.mode()` to obtain a latent tensor.
    """

    latent_dist: "DiagonalGaussianDistribution"


class _a(ModelMixin, ConfigMixin):
    r"""Variational autoencoder with a KL-regularized latent space.

    Encodes images into a diagonal Gaussian posterior over latents and decodes
    latents back to images.  Supports sliced (per-batch-element) and tiled
    (per-spatial-tile) encode/decode to bound peak memory on large inputs.

    NOTE(review): obfuscated class name `_a` kept for caller compatibility;
    this is the standard diffusers `AutoencoderKL` implementation.
    """

    _supports_gradient_checkpointing = True

    @register_to_config
    def __init__(
        self,
        in_channels: int = 3,
        out_channels: int = 3,
        down_block_types: Tuple[str] = ("DownEncoderBlock2D",),
        up_block_types: Tuple[str] = ("UpDecoderBlock2D",),
        block_out_channels: Tuple[int] = (64,),
        layers_per_block: int = 1,
        act_fn: str = "silu",
        latent_channels: int = 4,
        norm_num_groups: int = 32,
        sample_size: int = 32,
        scaling_factor: float = 0.18215,
    ):
        super().__init__()

        # pass init params to Encoder; double_z=True so it emits (mean, logvar)
        self.encoder = Encoder(
            in_channels=in_channels,
            out_channels=latent_channels,
            down_block_types=down_block_types,
            block_out_channels=block_out_channels,
            layers_per_block=layers_per_block,
            act_fn=act_fn,
            norm_num_groups=norm_num_groups,
            double_z=True,
        )

        # pass init params to Decoder
        self.decoder = Decoder(
            in_channels=latent_channels,
            out_channels=out_channels,
            up_block_types=up_block_types,
            block_out_channels=block_out_channels,
            layers_per_block=layers_per_block,
            norm_num_groups=norm_num_groups,
            act_fn=act_fn,
        )

        # 1x1 convolutions mapping to/from the (mean, logvar) latent statistics
        self.quant_conv = nn.Conv2d(2 * latent_channels, 2 * latent_channels, 1)
        self.post_quant_conv = nn.Conv2d(latent_channels, latent_channels, 1)

        self.use_slicing = False
        self.use_tiling = False

        # only relevant if vae tiling is enabled
        self.tile_sample_min_size = self.config.sample_size
        sample_size = (
            self.config.sample_size[0]
            if isinstance(self.config.sample_size, (list, tuple))
            else self.config.sample_size
        )
        # latent tile size = pixel tile size / total downsampling factor
        self.tile_latent_min_size = int(sample_size / (2 ** (len(self.config.block_out_channels) - 1)))
        self.tile_overlap_factor = 0.25

    def _set_gradient_checkpointing(self, module, value=False):
        # Toggle gradient checkpointing only on the encoder/decoder submodules.
        if isinstance(module, (Encoder, Decoder)):
            module.gradient_checkpointing = value

    def enable_tiling(self, use_tiling: bool = True):
        """Enable (or set) tiled VAE encode/decode for large images."""
        self.use_tiling = use_tiling

    def disable_tiling(self):
        """Disable tiled VAE encode/decode."""
        self.enable_tiling(False)

    def enable_slicing(self):
        """Enable sliced VAE decoding (one batch element at a time)."""
        self.use_slicing = True

    def disable_slicing(self):
        """Disable sliced VAE decoding."""
        self.use_slicing = False

    @property
    # Copied from diffusers.models.unet_2d_condition.UNet2DConditionModel.attn_processors
    def attn_processors(self) -> Dict[str, AttentionProcessor]:
        r"""Return all attention processors in the model, keyed by weight name."""
        processors = {}

        def fn_recursive_add_processors(name, module, processors):
            if hasattr(module, "set_processor"):
                processors[f"{name}.processor"] = module.processor

            for sub_name, child in module.named_children():
                fn_recursive_add_processors(f"{name}.{sub_name}", child, processors)

            return processors

        for name, module in self.named_children():
            fn_recursive_add_processors(name, module, processors)

        return processors

    def set_attn_processor(self, processor: Union[AttentionProcessor, Dict[str, AttentionProcessor]]):
        r"""Set the attention processor(s).

        Args:
            processor: a single processor used everywhere, or a dict mapping
                every attention layer's key to its processor.

        Raises:
            ValueError: if a dict is passed whose size does not match the
                number of attention layers.
        """
        count = len(self.attn_processors.keys())

        if isinstance(processor, dict) and len(processor) != count:
            raise ValueError(
                f"A dict of processors was passed, but the number of processors {len(processor)} does not match the"
                f" number of attention layers: {count}. Please make sure to pass {count} processor classes."
            )

        def fn_recursive_attn_processor(name, module, processor):
            if hasattr(module, "set_processor"):
                if not isinstance(processor, dict):
                    module.set_processor(processor)
                else:
                    # dict entries are consumed (popped) as they are assigned
                    module.set_processor(processor.pop(f"{name}.processor"))

            for sub_name, child in module.named_children():
                fn_recursive_attn_processor(f"{name}.{sub_name}", child, processor)

        for name, module in self.named_children():
            fn_recursive_attn_processor(name, module, processor)

    def set_default_attn_processor(self):
        """Reset every attention layer to the default `AttnProcessor`."""
        self.set_attn_processor(AttnProcessor())

    @apply_forward_hook
    def encode(self, x: torch.FloatTensor, return_dict: bool = True) -> "AutoencoderKLOutput":
        """Encode a batch of images into a latent posterior distribution."""
        if self.use_tiling and (x.shape[-1] > self.tile_sample_min_size or x.shape[-2] > self.tile_sample_min_size):
            return self.tiled_encode(x, return_dict=return_dict)

        if self.use_slicing and x.shape[0] > 1:
            # Encode one batch element at a time to bound peak memory.
            encoded_slices = [self.encoder(x_slice) for x_slice in x.split(1)]
            h = torch.cat(encoded_slices)
        else:
            h = self.encoder(x)

        moments = self.quant_conv(h)
        posterior = DiagonalGaussianDistribution(moments)

        if not return_dict:
            return (posterior,)

        return AutoencoderKLOutput(latent_dist=posterior)

    def _decode(self, z: torch.FloatTensor, return_dict: bool = True) -> Union[DecoderOutput, torch.FloatTensor]:
        # Core (non-sliced) decode path; dispatches to tiling when enabled.
        if self.use_tiling and (z.shape[-1] > self.tile_latent_min_size or z.shape[-2] > self.tile_latent_min_size):
            return self.tiled_decode(z, return_dict=return_dict)

        z = self.post_quant_conv(z)
        dec = self.decoder(z)

        if not return_dict:
            return (dec,)

        return DecoderOutput(sample=dec)

    @apply_forward_hook
    def decode(self, z: torch.FloatTensor, return_dict: bool = True) -> Union[DecoderOutput, torch.FloatTensor]:
        """Decode latents to images, optionally one batch element at a time."""
        if self.use_slicing and z.shape[0] > 1:
            decoded_slices = [self._decode(z_slice).sample for z_slice in z.split(1)]
            decoded = torch.cat(decoded_slices)
        else:
            decoded = self._decode(z).sample

        if not return_dict:
            return (decoded,)

        return DecoderOutput(sample=decoded)

    def blend_v(self, a, b, blend_extent):
        # Linearly blend the bottom rows of `a` into the top rows of `b` (in place on `b`).
        blend_extent = min(a.shape[2], b.shape[2], blend_extent)
        for y in range(blend_extent):
            b[:, :, y, :] = a[:, :, -blend_extent + y, :] * (1 - y / blend_extent) + b[:, :, y, :] * (y / blend_extent)
        return b

    def blend_h(self, a, b, blend_extent):
        # Linearly blend the right columns of `a` into the left columns of `b` (in place on `b`).
        blend_extent = min(a.shape[3], b.shape[3], blend_extent)
        for x in range(blend_extent):
            b[:, :, :, x] = a[:, :, :, -blend_extent + x] * (1 - x / blend_extent) + b[:, :, :, x] * (x / blend_extent)
        return b

    def tiled_encode(self, x: torch.FloatTensor, return_dict: bool = True) -> "AutoencoderKLOutput":
        r"""Encode a large image by splitting it into overlapping tiles and
        blending the tile seams in latent space."""
        overlap_size = int(self.tile_sample_min_size * (1 - self.tile_overlap_factor))
        blend_extent = int(self.tile_latent_min_size * self.tile_overlap_factor)
        row_limit = self.tile_latent_min_size - blend_extent

        # Split the image into 512x512 tiles and encode them separately.
        rows = []
        for i in range(0, x.shape[2], overlap_size):
            row = []
            for j in range(0, x.shape[3], overlap_size):
                tile = x[:, :, i : i + self.tile_sample_min_size, j : j + self.tile_sample_min_size]
                tile = self.encoder(tile)
                tile = self.quant_conv(tile)
                row.append(tile)
            rows.append(row)
        result_rows = []
        for i, row in enumerate(rows):
            result_row = []
            for j, tile in enumerate(row):
                # blend the above tile and the left tile
                # to the current tile and add the current tile to the result row
                if i > 0:
                    tile = self.blend_v(rows[i - 1][j], tile, blend_extent)
                if j > 0:
                    tile = self.blend_h(row[j - 1], tile, blend_extent)
                result_row.append(tile[:, :, :row_limit, :row_limit])
            result_rows.append(torch.cat(result_row, dim=3))

        moments = torch.cat(result_rows, dim=2)
        posterior = DiagonalGaussianDistribution(moments)

        if not return_dict:
            return (posterior,)

        return AutoencoderKLOutput(latent_dist=posterior)

    def tiled_decode(self, z: torch.FloatTensor, return_dict: bool = True) -> Union[DecoderOutput, torch.FloatTensor]:
        r"""Decode a large latent by splitting it into overlapping tiles and
        blending the tile seams in pixel space."""
        overlap_size = int(self.tile_latent_min_size * (1 - self.tile_overlap_factor))
        blend_extent = int(self.tile_sample_min_size * self.tile_overlap_factor)
        row_limit = self.tile_sample_min_size - blend_extent

        # Split z into overlapping 64x64 tiles and decode them separately.
        # The tiles have an overlap to avoid seams between tiles.
        rows = []
        for i in range(0, z.shape[2], overlap_size):
            row = []
            for j in range(0, z.shape[3], overlap_size):
                tile = z[:, :, i : i + self.tile_latent_min_size, j : j + self.tile_latent_min_size]
                tile = self.post_quant_conv(tile)
                decoded = self.decoder(tile)
                row.append(decoded)
            rows.append(row)
        result_rows = []
        for i, row in enumerate(rows):
            result_row = []
            for j, tile in enumerate(row):
                # blend the above tile and the left tile
                # to the current tile and add the current tile to the result row
                if i > 0:
                    tile = self.blend_v(rows[i - 1][j], tile, blend_extent)
                if j > 0:
                    tile = self.blend_h(row[j - 1], tile, blend_extent)
                result_row.append(tile[:, :, :row_limit, :row_limit])
            result_rows.append(torch.cat(result_row, dim=3))

        dec = torch.cat(result_rows, dim=2)
        if not return_dict:
            return (dec,)

        return DecoderOutput(sample=dec)

    def forward(
        self,
        sample: torch.FloatTensor,
        sample_posterior: bool = False,
        return_dict: bool = True,
        generator: Optional[torch.Generator] = None,
    ) -> Union[DecoderOutput, torch.FloatTensor]:
        r"""Full autoencoding pass: encode, sample (or take the mode) of the
        posterior, then decode back to pixel space."""
        x = sample
        posterior = self.encode(x).latent_dist
        if sample_posterior:
            z = posterior.sample(generator=generator)
        else:
            z = posterior.mode()
        dec = self.decode(z).sample

        if not return_dict:
            return (dec,)

        return DecoderOutput(sample=dec)
704
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available


# Map of submodule name -> public symbols, consumed lazily by _LazyModule below.
_import_structure = {
    "configuration_poolformer": [
        "POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "PoolFormerConfig",
        "PoolFormerOnnxConfig",
    ]
}

try:
    if not is_vision_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # vision extras missing: simply skip the image-processing entries
    pass
else:
    _import_structure["feature_extraction_poolformer"] = ["PoolFormerFeatureExtractor"]
    _import_structure["image_processing_poolformer"] = ["PoolFormerImageProcessor"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # torch missing: skip the modeling entries
    pass
else:
    _import_structure["modeling_poolformer"] = [
        "POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "PoolFormerForImageClassification",
        "PoolFormerModel",
        "PoolFormerPreTrainedModel",
    ]

if TYPE_CHECKING:
    # Real imports only for static type checkers; runtime uses the lazy module.
    from .configuration_poolformer import (
        POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,
        PoolFormerConfig,
        PoolFormerOnnxConfig,
    )

    try:
        if not is_vision_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .feature_extraction_poolformer import PoolFormerFeatureExtractor
        from .image_processing_poolformer import PoolFormerImageProcessor

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_poolformer import (
            POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            PoolFormerForImageClassification,
            PoolFormerModel,
            PoolFormerPreTrainedModel,
        )

else:
    import sys

    # Replace this module with a lazy proxy so submodules import on first use.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)
693
0
from typing import List, Optional, Union

import numpy as np
import PIL.Image

from ...image_processing_utils import BaseImageProcessor, BatchFeature
from ...image_transforms import rescale, resize, to_channel_dimension_format
from ...image_utils import (
    ChannelDimension,
    PILImageResampling,
    get_image_size,
    make_list_of_images,
    to_numpy_array,
    valid_images,
)
from ...utils import TensorType, logging


logger = logging.get_logger(__name__)


class _a(BaseImageProcessor):
    r"""Image processor that resizes images down to a multiple of
    `size_divisor` and rescales pixel values to [0, 1].

    NOTE(review): obfuscated class name `_a` kept for caller compatibility;
    base class restored to the imported `BaseImageProcessor`.

    Args:
        do_resize: whether to round image dimensions down to a multiple of
            `size_divisor`.
        size_divisor: the divisor that output height/width must be a
            multiple of.
        resample: resampling filter used when resizing.
        do_rescale: whether to rescale pixel values by 1/255.
    """

    model_input_names = ["pixel_values"]

    def __init__(
        self,
        do_resize: bool = True,
        size_divisor: int = 32,
        resample=PILImageResampling.BILINEAR,
        do_rescale: bool = True,
        **kwargs,
    ) -> None:
        self.do_resize = do_resize
        self.do_rescale = do_rescale
        self.size_divisor = size_divisor
        self.resample = resample
        super().__init__(**kwargs)

    def resize(self, image, size_divisor, resample, data_format=None, **kwargs):
        """Resize `image` so both dimensions are multiples of `size_divisor`."""
        height, width = get_image_size(image)
        # Rounds the height and width down to the closest multiple of size_divisor
        new_h = height // size_divisor * size_divisor
        new_w = width // size_divisor * size_divisor
        # bare `resize` resolves to the functional helper imported above,
        # not this method
        image = resize(image, (new_h, new_w), resample=resample, data_format=data_format, **kwargs)
        return image

    def rescale(self, image, scale, data_format=None, **kwargs):
        """Multiply pixel values by `scale` (delegates to the functional helper)."""
        return rescale(image=image, scale=scale, data_format=data_format, **kwargs)

    def preprocess(
        self,
        images,
        do_resize: Optional[bool] = None,
        size_divisor: Optional[int] = None,
        resample=None,
        do_rescale: Optional[bool] = None,
        return_tensors: Optional[Union[TensorType, str]] = None,
        data_format: ChannelDimension = ChannelDimension.FIRST,
        **kwargs,
    ) -> BatchFeature:
        """Preprocess one image or a batch of images into model inputs.

        Raises:
            ValueError: if resizing is requested without a `size_divisor`,
                or if the input images are invalid.
        """
        do_resize = do_resize if do_resize is not None else self.do_resize
        do_rescale = do_rescale if do_rescale is not None else self.do_rescale
        size_divisor = size_divisor if size_divisor is not None else self.size_divisor
        resample = resample if resample is not None else self.resample

        if do_resize and size_divisor is None:
            raise ValueError("size_divisor is required for resizing")

        images = make_list_of_images(images)

        if not valid_images(images):
            raise ValueError("Invalid image(s)")

        # All transformations expect numpy arrays.
        images = [to_numpy_array(img) for img in images]

        if do_resize:
            images = [self.resize(image, size_divisor=size_divisor, resample=resample) for image in images]

        if do_rescale:
            images = [self.rescale(image, scale=1 / 255) for image in images]

        images = [to_channel_dimension_format(image, data_format) for image in images]

        data = {"pixel_values": images}
        return BatchFeature(data=data, tensor_type=return_tensors)
705
import os
import string
import sys


ARROW_KEY_FLAG = 1 << 8

KEYMAP = {
    "tab": ord("\t"),
    "newline": ord("\r"),
    "esc": 27,
    "up": 65 + ARROW_KEY_FLAG,
    "down": 66 + ARROW_KEY_FLAG,
    "right": 67 + ARROW_KEY_FLAG,
    "left": 68 + ARROW_KEY_FLAG,
    "mod_int": 91,
    "undefined": sys.maxsize,
    "interrupt": 3,
    "insert": 50,
    "delete": 51,
    "pg_up": 53,
    "pg_down": 54,
}

# Arrow key codes form a contiguous range [up..left]; used by get_character.
KEYMAP["arrow_begin"] = KEYMAP["up"]
KEYMAP["arrow_end"] = KEYMAP["left"]

if sys.platform == "win32":
    # Buffer of translated characters pending delivery on Windows.
    WIN_CH_BUFFER = []
    # Raw msvcrt two-byte arrow sequences -> our (un-flagged) arrow codes.
    WIN_KEYMAP = {
        b"\xe0H": KEYMAP["up"] - ARROW_KEY_FLAG,
        b"\x00H": KEYMAP["up"] - ARROW_KEY_FLAG,
        b"\xe0P": KEYMAP["down"] - ARROW_KEY_FLAG,
        b"\x00P": KEYMAP["down"] - ARROW_KEY_FLAG,
        b"\xe0M": KEYMAP["right"] - ARROW_KEY_FLAG,
        b"\x00M": KEYMAP["right"] - ARROW_KEY_FLAG,
        b"\xe0K": KEYMAP["left"] - ARROW_KEY_FLAG,
        b"\x00K": KEYMAP["left"] - ARROW_KEY_FLAG,
    }

# Digit keys map to their own character codes.
for i in range(10):
    KEYMAP[str(i)] = ord(str(i))


def get_raw_chars():
    """Read raw character(s) from stdin without echoing (platform specific)."""
    if os.name == "nt":
        import msvcrt

        encoding = "mbcs"
        # Flush the keyboard buffer
        while msvcrt.kbhit():
            msvcrt.getch()
        if len(WIN_CH_BUFFER) == 0:
            # Read the keystroke
            ch = msvcrt.getch()
            # If it is a prefix char, get second part
            if ch in (b"\x00", b"\xe0"):
                ch2 = ch + msvcrt.getch()
                # Translate actual Win chars to bullet char types
                try:
                    chx = chr(WIN_KEYMAP[ch2])
                    WIN_CH_BUFFER.append(chr(KEYMAP["mod_int"]))
                    WIN_CH_BUFFER.append(chx)
                    if ord(chx) in (
                        KEYMAP["insert"] - 1 << 9,
                        KEYMAP["delete"] - 1 << 9,
                        KEYMAP["pg_up"] - 1 << 9,
                        KEYMAP["pg_down"] - 1 << 9,
                    ):
                        WIN_CH_BUFFER.append(chr(126))
                    ch = chr(KEYMAP["esc"])
                except KeyError:
                    ch = ch2[1]
            else:
                ch = ch.decode(encoding)
        else:
            ch = WIN_CH_BUFFER.pop(0)
    elif os.name == "posix":
        import termios
        import tty

        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            # Raw mode so single keystrokes are delivered without Enter.
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            # Always restore the terminal, even if the read fails.
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch


def get_character():
    """Read one logical key press and return its character or KEYMAP code."""
    char = get_raw_chars()
    if ord(char) in [KEYMAP["interrupt"], KEYMAP["newline"]]:
        return char

    elif ord(char) == KEYMAP["esc"]:
        combo = get_raw_chars()
        if ord(combo) == KEYMAP["mod_int"]:
            key = get_raw_chars()
            # ESC [ <letter> escape sequence: map arrow letters to flagged codes.
            if ord(key) >= KEYMAP["arrow_begin"] - ARROW_KEY_FLAG and ord(key) <= KEYMAP["arrow_end"] - ARROW_KEY_FLAG:
                return chr(ord(key) + ARROW_KEY_FLAG)
            else:
                return KEYMAP["undefined"]
        else:
            return get_raw_chars()

    else:
        if char in string.printable:
            return char
        else:
            return KEYMAP["undefined"]
693
0
import unittest

from transformers import load_tool
from transformers.utils import is_torch_available


if is_torch_available():
    import torch

from transformers.testing_utils import require_torch

from .test_tools_common import ToolTesterMixin


@require_torch
class _a(unittest.TestCase, ToolTesterMixin):
    """Tests for the text-to-speech tool (mixes in the common tool checks).

    NOTE(review): obfuscated class name `_a` kept; mixin restored to the
    imported `ToolTesterMixin`.
    """

    def setUp(self):
        # Load and initialise the tool once per test.
        self.tool = load_tool("text-to-speech")
        self.tool.setup()

    def test_exact_match_arg(self):
        # SpeechT5 isn't deterministic, so seed before generation.
        torch.manual_seed(0)
        result = self.tool("hey")
        resulting_tensor = result.to_raw()
        self.assertTrue(
            torch.allclose(
                resulting_tensor[:3],
                torch.tensor([-0.0005966668832115829, -0.0003657640190795064, -0.00013439502799883485]),
            )
        )

    def test_exact_match_kwarg(self):
        # SpeechT5 isn't deterministic, so seed before generation.
        torch.manual_seed(0)
        result = self.tool("hey")
        resulting_tensor = result.to_raw()
        self.assertTrue(
            torch.allclose(
                resulting_tensor[:3],
                torch.tensor([-0.0005966668832115829, -0.0003657640190795064, -0.00013439502799883485]),
            )
        )
706
# Imports import numpy as np class _a : def __init__( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> List[Any]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) def _snake_case ( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Union[str, Any]: if red is not None: lowerCAmelCase : str = red if green is not None: lowerCAmelCase : Optional[int] = green if blue is not None: lowerCAmelCase : Optional[int] = blue if red_edge is not None: lowerCAmelCase : Tuple = red_edge if nir is not None: lowerCAmelCase : Union[str, Any] = nir return True def _snake_case ( self , lowercase_="" , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Optional[int]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) lowerCAmelCase : int = { """ARVI2""": self.arvaa, """CCCI""": self.ccci, """CVI""": self.cvi, """GLI""": self.gli, """NDVI""": self.ndvi, """BNDVI""": self.bndvi, """redEdgeNDVI""": self.red_edge_ndvi, """GNDVI""": self.gndvi, """GBNDVI""": self.gbndvi, """GRNDVI""": self.grndvi, """RBNDVI""": self.rbndvi, """PNDVI""": self.pndvi, """ATSAVI""": self.atsavi, """BWDRVI""": self.bwdrvi, """CIgreen""": self.ci_green, """CIrededge""": self.ci_rededge, """CI""": self.ci, """CTVI""": self.ctvi, """GDVI""": self.gdvi, """EVI""": self.evi, """GEMI""": self.gemi, """GOSAVI""": self.gosavi, """GSAVI""": self.gsavi, """Hue""": self.hue, """IVI""": self.ivi, """IPVI""": self.ipvi, """I""": self.i, """RVI""": self.rvi, """MRVI""": self.mrvi, """MSAVI""": self.m_savi, """NormG""": self.norm_g, """NormNIR""": self.norm_nir, """NormR""": self.norm_r, """NGRDI""": self.ngrdi, """RI""": self.ri, """S""": self.s, """IF""": self._if, """DVI""": self.dvi, """TVI""": self.tvi, """NDRE""": self.ndre, } try: return funcs[index]() except 
KeyError: print("""Index not in the list!""" ) return False def _snake_case ( self ) -> Dict: return -0.1_8 + (1.1_7 * ((self.nir - self.red) / (self.nir + self.red))) def _snake_case ( self ) -> Optional[Any]: return ((self.nir - self.redEdge) / (self.nir + self.redEdge)) / ( (self.nir - self.red) / (self.nir + self.red) ) def _snake_case ( self ) -> List[str]: return self.nir * (self.red / (self.green**2)) def _snake_case ( self ) -> Tuple: return (2 * self.green - self.red - self.blue) / ( 2 * self.green + self.red + self.blue ) def _snake_case ( self ) -> Optional[int]: return (self.nir - self.red) / (self.nir + self.red) def _snake_case ( self ) -> List[str]: return (self.nir - self.blue) / (self.nir + self.blue) def _snake_case ( self ) -> int: return (self.redEdge - self.red) / (self.redEdge + self.red) def _snake_case ( self ) -> Optional[Any]: return (self.nir - self.green) / (self.nir + self.green) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.blue)) / ( self.nir + (self.green + self.blue) ) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.red)) / ( self.nir + (self.green + self.red) ) def _snake_case ( self ) -> int: return (self.nir - (self.blue + self.red)) / (self.nir + (self.blue + self.red)) def _snake_case ( self ) -> List[str]: return (self.nir - (self.green + self.red + self.blue)) / ( self.nir + (self.green + self.red + self.blue) ) def _snake_case ( self , lowercase_=0.0_8 , lowercase_=1.2_2 , lowercase_=0.0_3 ) -> int: return a * ( (self.nir - a * self.red - b) / (a * self.nir + self.red - a * b + x * (1 + a**2)) ) def _snake_case ( self ) -> Optional[Any]: return (0.1 * self.nir - self.blue) / (0.1 * self.nir + self.blue) def _snake_case ( self ) -> Any: return (self.nir / self.green) - 1 def _snake_case ( self ) -> List[Any]: return (self.nir / self.redEdge) - 1 def _snake_case ( self ) -> str: return (self.red - self.blue) / self.red def _snake_case ( self ) -> Optional[int]: lowerCAmelCase 
: Dict = self.ndvi() return ((ndvi + 0.5) / (abs(ndvi + 0.5 ))) * (abs(ndvi + 0.5 ) ** (1 / 2)) def _snake_case ( self ) -> Optional[Any]: return self.nir - self.green def _snake_case ( self ) -> int: return 2.5 * ( (self.nir - self.red) / (self.nir + 6 * self.red - 7.5 * self.blue + 1) ) def _snake_case ( self ) -> Optional[Any]: lowerCAmelCase : Tuple = (2 * (self.nir**2 - self.red**2) + 1.5 * self.nir + 0.5 * self.red) / ( self.nir + self.red + 0.5 ) return n * (1 - 0.2_5 * n) - (self.red - 0.1_2_5) / (1 - self.red) def _snake_case ( self , lowercase_=0.1_6 ) -> Optional[int]: return (self.nir - self.green) / (self.nir + self.green + y) def _snake_case ( self , lowercase_=0.5 ) -> List[str]: return ((self.nir - self.green) / (self.nir + self.green + n)) * (1 + n) def _snake_case ( self ) -> Any: return np.arctan( ((2 * self.red - self.green - self.blue) / 3_0.5) * (self.green - self.blue) ) def _snake_case ( self , lowercase_=None , lowercase_=None ) -> List[Any]: return (self.nir - b) / (a * self.red) def _snake_case ( self ) -> Any: return (self.nir / ((self.nir + self.red) / 2)) * (self.ndvi() + 1) def _snake_case ( self ) -> str: return (self.red + self.green + self.blue) / 3_0.5 def _snake_case ( self ) -> Union[str, Any]: return self.nir / self.red def _snake_case ( self ) -> Tuple: return (self.rvi() - 1) / (self.rvi() + 1) def _snake_case ( self ) -> Dict: return ( (2 * self.nir + 1) - ((2 * self.nir + 1) ** 2 - 8 * (self.nir - self.red)) ** (1 / 2) ) / 2 def _snake_case ( self ) -> List[Any]: return self.green / (self.nir + self.red + self.green) def _snake_case ( self ) -> int: return self.nir / (self.nir + self.red + self.green) def _snake_case ( self ) -> Dict: return self.red / (self.nir + self.red + self.green) def _snake_case ( self ) -> List[Any]: return (self.green - self.red) / (self.green + self.red) def _snake_case ( self ) -> Optional[int]: return (self.red - self.green) / (self.red + self.green) def _snake_case ( self ) -> Tuple: 
lowerCAmelCase : Any = np.max([np.max(self.red ), np.max(self.green ), np.max(self.blue )] ) lowerCAmelCase : Dict = np.min([np.min(self.red ), np.min(self.green ), np.min(self.blue )] ) return (max_value - min_value) / max_value def _snake_case ( self ) -> int: return (2 * self.red - self.green - self.blue) / (self.green - self.blue) def _snake_case ( self ) -> List[str]: return self.nir / self.red def _snake_case ( self ) -> int: return (self.ndvi() + 0.5) ** (1 / 2) def _snake_case ( self ) -> str: return (self.nir - self.redEdge) / (self.nir + self.redEdge)
693
0
import random


def rabin_miller(num: int) -> bool:
    """Miller-Rabin probabilistic primality test with 5 random rounds.

    Args:
        num: odd integer > 3 to test (callers screen small/even numbers first).

    Returns:
        True if `num` is probably prime, False if definitely composite.
    """
    # Write num - 1 as 2**t * s with s odd.
    s = num - 1
    t = 0
    while s % 2 == 0:
        s = s // 2
        t += 1

    for _ in range(5):
        a = random.randrange(2, num - 1)
        v = pow(a, s, num)
        if v != 1:
            i = 0
            while v != (num - 1):
                if i == t - 1:
                    # No square root of 1 found along the chain: composite.
                    return False
                else:
                    i = i + 1
                    v = (v**2) % num
    return True


def is_prime_low_num(num: int) -> bool:
    """Check primality: trial division by primes < 1000, then Miller-Rabin.

    Returns False for num < 2.
    """
    if num < 2:
        return False

    # All primes below 1000; quick accept/reject before the probabilistic test.
    low_primes = [
        2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47, 53, 59, 61,
        67, 71, 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, 127, 131, 137,
        139, 149, 151, 157, 163, 167, 173, 179, 181, 191, 193, 197, 199, 211,
        223, 227, 229, 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, 283,
        293, 307, 311, 313, 317, 331, 337, 347, 349, 353, 359, 367, 373, 379,
        383, 389, 397, 401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461,
        463, 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563,
        569, 571, 577, 587, 593, 599, 601, 607, 613, 617, 619, 631, 641, 643,
        647, 653, 659, 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, 739,
        743, 751, 757, 761, 769, 773, 787, 797, 809, 811, 821, 823, 827, 829,
        839, 853, 857, 859, 863, 877, 881, 883, 887, 907, 911, 919, 929, 937,
        941, 947, 953, 967, 971, 977, 983, 991, 997,
    ]

    if num in low_primes:
        return True

    for prime in low_primes:
        if (num % prime) == 0:
            return False

    return rabin_miller(num)


def generate_large_prime(keysize: int = 1024) -> int:
    """Generate a random probable prime with exactly `keysize` bits."""
    while True:
        num = random.randrange(2 ** (keysize - 1), 2 ** (keysize))
        if is_prime_low_num(num):
            return num


if __name__ == "__main__":
    num = generate_large_prime()
    print(("Prime number:", num))
    print(("is_prime_low_num:", is_prime_low_num(num)))
707
import argparse
import json
import math
import os
import time
import traceback
import zipfile
from collections import Counter

import requests


def get_job_links(workflow_run_id, token=None):
    """Extract job names and their job links for a GitHub Actions workflow run.

    Args:
        workflow_run_id: The id of the workflow run.
        token: Optional GitHub token with `actions:read` permission.

    Returns:
        dict: Mapping of job name -> job html url (empty dict on failure).
    """
    headers = None
    if token is not None:
        headers = {"Accept": "application/vnd.github+json", "Authorization": f"Bearer {token}"}

    url = f"https://api.github.com/repos/huggingface/transformers/actions/runs/{workflow_run_id}/jobs?per_page=100"
    result = requests.get(url, headers=headers).json()
    job_links = {}

    try:
        job_links.update({job["name"]: job["html_url"] for job in result["jobs"]})
        # The API pages at 100 entries; page 1 was fetched above.
        pages_to_iterate_over = math.ceil((result["total_count"] - 100) / 100)

        for i in range(pages_to_iterate_over):
            result = requests.get(url + f"&page={i + 2}", headers=headers).json()
            job_links.update({job["name"]: job["html_url"] for job in result["jobs"]})

        return job_links
    except Exception:
        # Best effort: report and return an empty mapping rather than crash the report.
        print(f"Unknown error, could not fetch links:\n{traceback.format_exc()}")

    return {}


def get_artifacts_links(workflow_run_id, token=None):
    """Get all artifact names and their download links for a workflow run.

    Args:
        workflow_run_id: The id of the workflow run.
        token: Optional GitHub token with `actions:read` permission.

    Returns:
        dict: Mapping of artifact name -> archive download url (empty on failure).
    """
    headers = None
    if token is not None:
        headers = {"Accept": "application/vnd.github+json", "Authorization": f"Bearer {token}"}

    url = f"https://api.github.com/repos/huggingface/transformers/actions/runs/{workflow_run_id}/artifacts?per_page=100"
    result = requests.get(url, headers=headers).json()
    artifacts = {}

    try:
        artifacts.update({artifact["name"]: artifact["archive_download_url"] for artifact in result["artifacts"]})
        pages_to_iterate_over = math.ceil((result["total_count"] - 100) / 100)

        for i in range(pages_to_iterate_over):
            result = requests.get(url + f"&page={i + 2}", headers=headers).json()
            artifacts.update({artifact["name"]: artifact["archive_download_url"] for artifact in result["artifacts"]})

        return artifacts
    except Exception:
        print(f"Unknown error, could not fetch links:\n{traceback.format_exc()}")

    return {}


def download_artifact(artifact_name, artifact_url, output_dir, token):
    """Download a workflow artifact zip into `output_dir`.

    The artifact endpoint answers with a redirect whose `Location` header is
    the actual (temporary, unauthenticated) download URL, so we follow it in
    two steps.
    """
    headers = None
    if token is not None:
        headers = {"Accept": "application/vnd.github+json", "Authorization": f"Bearer {token}"}

    result = requests.get(artifact_url, headers=headers, allow_redirects=False)
    download_url = result.headers["Location"]
    response = requests.get(download_url, allow_redirects=True)
    file_path = os.path.join(output_dir, f"{artifact_name}.zip")
    with open(file_path, "wb") as fp:
        fp.write(response.content)


def get_errors_from_single_artifact(artifact_zip_path, job_links=None):
    """Extract errors from a single downloaded artifact (.zip).

    Reads `failures_line.txt`, `summary_short.txt` and `job_name.txt` inside
    the archive.

    Returns:
        list: Elements of the form [error line, error, failed test, job link].

    Raises:
        ValueError: If the number of errors and failed tests do not match.
    """
    errors = []
    failed_tests = []
    job_name = None

    with zipfile.ZipFile(artifact_zip_path) as z:
        for filename in z.namelist():
            if not os.path.isdir(filename):
                # read the file
                if filename in ["failures_line.txt", "summary_short.txt", "job_name.txt"]:
                    with z.open(filename) as f:
                        for line in f:
                            line = line.decode("UTF-8").strip()
                            if filename == "failures_line.txt":
                                try:
                                    # `error_line` is the place where `error` occurs
                                    error_line = line[: line.index(": ")]
                                    error = line[line.index(": ") + len(": ") :]
                                    errors.append([error_line, error])
                                except Exception:
                                    # skip un-related lines
                                    pass
                            elif filename == "summary_short.txt" and line.startswith("FAILED "):
                                # `test` is the test method that failed
                                test = line[len("FAILED ") :]
                                failed_tests.append(test)
                            elif filename == "job_name.txt":
                                job_name = line

    if len(errors) != len(failed_tests):
        raise ValueError(
            f"`errors` and `failed_tests` should have the same number of elements. Got {len(errors)} for `errors` "
            f"and {len(failed_tests)} for `failed_tests` instead. The test reports in {artifact_zip_path} have some"
            " problem."
        )

    job_link = None
    if job_name and job_links:
        job_link = job_links.get(job_name, None)

    # A list with elements of the form (line of error, error, failed test)
    result = [x + [y] + [job_link] for x, y in zip(errors, failed_tests)]

    return result


def get_all_errors(artifact_dir, job_links=None):
    """Extract errors from every artifact zip found in `artifact_dir`."""
    errors = []

    paths = [os.path.join(artifact_dir, p) for p in os.listdir(artifact_dir) if p.endswith(".zip")]
    for p in paths:
        errors.extend(get_errors_from_single_artifact(p, job_links=job_links))

    return errors


def reduce_by_error(logs, error_filter=None):
    """Count each error, sorted descending by count.

    Args:
        logs: List of [error line, error, failed test, job link] entries.
        error_filter: Optional collection of error strings to exclude.

    Returns:
        dict: error -> {"count": int, "failed_tests": [(job link, error line), ...]}.
    """
    counter = Counter()
    counter.update([x[1] for x in logs])
    counts = counter.most_common()
    r = {}
    for error, count in counts:
        if error_filter is None or error not in error_filter:
            r[error] = {"count": count, "failed_tests": [(x[2], x[0]) for x in logs if x[1] == error]}

    r = dict(sorted(r.items(), key=lambda item: item[1]["count"], reverse=True))
    return r


def get_model(test):
    """Get the model name from a test method path, or None if not a model test."""
    test = test.split("::")[0]
    if test.startswith("tests/models/"):
        model = test.split("/")[2]
    else:
        model = None

    return model


def reduce_by_model(logs, error_filter=None):
    """Count errors per model, sorted descending by total error count.

    Returns:
        dict: model -> {"count": int, "errors": {error: count}}.
    """
    logs = [(x[0], x[1], get_model(x[2])) for x in logs]
    logs = [x for x in logs if x[2] is not None]
    tests = {x[2] for x in logs}

    r = {}
    for test in tests:
        counter = Counter()
        # count by errors in `test`
        counter.update([x[1] for x in logs if x[2] == test])
        counts = counter.most_common()
        error_counts = {error: count for error, count in counts if (error_filter is None or error not in error_filter)}
        n_errors = sum(error_counts.values())
        if n_errors > 0:
            r[test] = {"count": n_errors, "errors": error_counts}

    r = dict(sorted(r.items(), key=lambda item: item[1]["count"], reverse=True))
    return r


def make_github_table(reduced_by_error):
    """Render the per-error counts as a GitHub Markdown table."""
    header = "| no. | error | status |"
    sep = "|-:|:-|:-|"
    lines = [header, sep]
    for error in reduced_by_error:
        count = reduced_by_error[error]["count"]
        line = f"| {count} | {error[:100]} | |"
        lines.append(line)

    return "\n".join(lines)


def make_github_table_per_model(reduced_by_model):
    """Render per-model counts (with each model's top error) as a Markdown table."""
    header = "| model | no. of errors | major error | count |"
    sep = "|-:|-:|-:|-:|"
    lines = [header, sep]
    for model in reduced_by_model:
        count = reduced_by_model[model]["count"]
        error, _count = list(reduced_by_model[model]["errors"].items())[0]
        line = f"| {model} | {count} | {error[:60]} | {_count} |"
        lines.append(line)

    return "\n".join(lines)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument("--workflow_run_id", type=str, required=True, help="A GitHub Actions workflow run id.")
    parser.add_argument(
        "--output_dir",
        type=str,
        required=True,
        help="Where to store the downloaded artifacts and other result files.",
    )
    parser.add_argument("--token", default=None, type=str, help="A token that has actions:read permission.")
    args = parser.parse_args()

    os.makedirs(args.output_dir, exist_ok=True)

    _job_links = get_job_links(args.workflow_run_id, token=args.token)
    job_links = {}
    # To deal with `workflow_call` event, where a job name is the combination of the job names in the caller and callee.
    # For example, `PyTorch 1.11 / Model tests (models/albert, single-gpu)`.
    if _job_links:
        for k, v in _job_links.items():
            # This is how GitHub actions combine job names.
            if " / " in k:
                index = k.find(" / ")
                k = k[index + len(" / ") :]
            job_links[k] = v
    with open(os.path.join(args.output_dir, "job_links.json"), "w", encoding="UTF-8") as fp:
        json.dump(job_links, fp, ensure_ascii=False, indent=4)

    artifacts = get_artifacts_links(args.workflow_run_id, token=args.token)
    with open(os.path.join(args.output_dir, "artifacts.json"), "w", encoding="UTF-8") as fp:
        json.dump(artifacts, fp, ensure_ascii=False, indent=4)

    for idx, (name, url) in enumerate(artifacts.items()):
        download_artifact(name, url, args.output_dir, args.token)
        # Be gentle to GitHub
        time.sleep(1)

    errors = get_all_errors(args.output_dir, job_links=job_links)

    # `e[1]` is the error
    counter = Counter()
    counter.update([e[1] for e in errors])

    # print the top 30 most common test errors
    most_common = counter.most_common(30)
    for item in most_common:
        print(item)

    with open(os.path.join(args.output_dir, "errors.json"), "w", encoding="UTF-8") as fp:
        json.dump(errors, fp, ensure_ascii=False, indent=4)

    reduced_by_error = reduce_by_error(errors)
    reduced_by_model = reduce_by_model(errors)

    s1 = make_github_table(reduced_by_error)
    s2 = make_github_table_per_model(reduced_by_model)

    with open(os.path.join(args.output_dir, "reduced_by_error.txt"), "w", encoding="UTF-8") as fp:
        fp.write(s1)
    with open(os.path.join(args.output_dir, "reduced_by_model.txt"), "w", encoding="UTF-8") as fp:
        fp.write(s2)
693
0
import argparse

import torch
from transformers import BlenderbotConfig, BlenderbotForConditionalGeneration
from transformers.utils import logging


logging.set_verbosity_info()
logger = logging.get_logger(__name__)

# (parlai substring, hf substring) rename pairs, applied in order.
PATTERNS = [
    ["attention", "attn"],
    ["encoder_attention", "encoder_attn"],
    ["q_lin", "q_proj"],
    ["k_lin", "k_proj"],
    ["v_lin", "v_proj"],
    ["out_lin", "out_proj"],
    ["norm_embeddings", "layernorm_embedding"],
    ["position_embeddings", "embed_positions"],
    ["embeddings", "embed_tokens"],
    ["ffn.lin", "fc"],
]


def rename_state_dict_key(k):
    """Map a ParlAI state-dict key to the corresponding HF Blenderbot key."""
    if k == "embeddings.weight":
        return "shared.weight"

    for parlai_name, hf_name in PATTERNS:
        k = k.replace(parlai_name, hf_name)

    if k.startswith("encoder"):
        k = k.replace(".attn", ".self_attn")
        k = k.replace("norm1", "self_attn_layer_norm")
        k = k.replace("norm2", "final_layer_norm")
    elif k.startswith("decoder"):
        k = k.replace("norm1", "self_attn_layer_norm")
        k = k.replace("norm2", "encoder_attn_layer_norm")
        k = k.replace("norm3", "final_layer_norm")
    return k


def rename_layernorm_keys(sd):
    """Rename (in place) the four `layernorm_embedding` entries of `sd` to `layer_norm`.

    Raises:
        KeyError: If an expected key is missing from `sd`.
    """
    keys = [
        "model.encoder.layernorm_embedding.weight",
        "model.encoder.layernorm_embedding.bias",
        "model.decoder.layernorm_embedding.weight",
        "model.decoder.layernorm_embedding.bias",
    ]
    for k in keys:
        v = sd.pop(k)
        new_k = k.replace("layernorm_embedding", "layer_norm")
        assert new_k not in sd
        sd[new_k] = v


IGNORE_KEYS = ["START"]


@torch.no_grad()
def convert_parlai_checkpoint(checkpoint_path, pytorch_dump_folder_path, config_json_path):
    """Convert a ParlAI Blenderbot checkpoint into an HF model and save it.

    Args:
        checkpoint_path: Path to the ParlAI `.bin` checkpoint.
        pytorch_dump_folder_path: Output directory for the converted model.
        config_json_path: Path to a Blenderbot config json.
    """
    model = torch.load(checkpoint_path, map_location="cpu")
    sd = model["model"]
    cfg = BlenderbotConfig.from_json_file(config_json_path)
    m = BlenderbotForConditionalGeneration(cfg)
    valid_keys = m.model.state_dict().keys()
    failures = []
    mapping = {}
    for k, v in sd.items():
        if k in IGNORE_KEYS:
            continue

        new_k = rename_state_dict_key(k)
        if new_k not in valid_keys:
            failures.append([k, new_k])
        else:
            mapping[new_k] = v
    if cfg.normalize_before:  # Blenderbot-3B checkpoints. Rename layernorm_embedding -> layer_norm
        rename_layernorm_keys(sd)
    m.model.load_state_dict(mapping, strict=True)
    m.half()
    m.save_pretrained(pytorch_dump_folder_path)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument("--src_path", type=str, help="like blenderbot-model.bin")
    parser.add_argument("--save_dir", default="hf_blenderbot", type=str, help="Where to save converted model.")
    parser.add_argument(
        "--hf_config_json", default="blenderbot-3b-config.json", type=str, help="Path to config to use"
    )
    args = parser.parse_args()
    convert_parlai_checkpoint(args.src_path, args.save_dir, args.hf_config_json)
708
from typing import TYPE_CHECKING

# rely on isort to merge the imports
from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available


# Lazy-import structure: submodule name -> public names it exports.
_import_structure = {
    "configuration_autoformer": [
        "AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "AutoformerConfig",
    ],
}

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # Torch is missing: expose only the configuration objects.
    pass
else:
    _import_structure["modeling_autoformer"] = [
        "AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "AutoformerForPrediction",
        "AutoformerModel",
        "AutoformerPreTrainedModel",
    ]

if TYPE_CHECKING:
    from .configuration_autoformer import (
        AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,
        AutoformerConfig,
    )

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_autoformer import (
            AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            AutoformerForPrediction,
            AutoformerModel,
            AutoformerPreTrainedModel,
        )

else:
    import sys

    # Replace this module with a lazy proxy so heavy imports happen on demand.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
693
0
import unittest

from transformers import MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING, is_vision_available, pipeline
from transformers.testing_utils import (
    is_pipeline_test,
    nested_simplify,
    require_tf,
    require_torch,
    require_vision,
    slow,
)

from .test_pipelines_common import ANY


if is_vision_available():
    from PIL import Image
else:

    class Image:
        """Stub so references to `Image.open` don't fail without vision deps."""

        @staticmethod
        def open(*args, **kwargs):
            pass


@is_pipeline_test
@require_vision
@require_torch
class ZeroShotObjectDetectionPipelineTests(unittest.TestCase):
    model_mapping = MODEL_FOR_ZERO_SHOT_OBJECT_DETECTION_MAPPING

    def get_test_pipeline(self, model, tokenizer, processor):
        """Build a tiny pipeline plus example inputs for the common pipeline tests."""
        object_detector = pipeline(
            "zero-shot-object-detection", model="hf-internal-testing/tiny-random-owlvit-object-detection"
        )

        examples = [
            {
                "image": "./tests/fixtures/tests_samples/COCO/000000039769.png",
                "candidate_labels": ["cat", "remote", "couch"],
            }
        ]
        return object_detector, examples

    def run_pipeline_test(self, object_detector, examples):
        """Check output structure only (scores vary per randomly-initialized model)."""
        outputs = object_detector(examples[0], threshold=0.0)

        n = len(outputs)
        self.assertGreater(n, 0)
        self.assertEqual(
            outputs,
            [
                {
                    "score": ANY(float),
                    "label": ANY(str),
                    "box": {"xmin": ANY(int), "ymin": ANY(int), "xmax": ANY(int), "ymax": ANY(int)},
                }
                for i in range(n)
            ],
        )

    @require_tf
    @unittest.skip("Zero Shot Object Detection not implemented in TF")
    def test_small_model_tf(self):
        pass

    @require_torch
    def test_small_model_pt(self):
        object_detector = pipeline(
            "zero-shot-object-detection", model="hf-internal-testing/tiny-random-owlvit-object-detection"
        )

        outputs = object_detector(
            "./tests/fixtures/tests_samples/COCO/000000039769.png",
            candidate_labels=["cat", "remote", "couch"],
            threshold=0.64,
        )

        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                {"score": 0.7235, "label": "cat", "box": {"xmin": 204, "ymin": 167, "xmax": 232, "ymax": 190}},
                {"score": 0.7218, "label": "remote", "box": {"xmin": 204, "ymin": 167, "xmax": 232, "ymax": 190}},
                {"score": 0.7184, "label": "couch", "box": {"xmin": 204, "ymin": 167, "xmax": 232, "ymax": 190}},
                {"score": 0.6748, "label": "remote", "box": {"xmin": 571, "ymin": 83, "xmax": 598, "ymax": 103}},
                {"score": 0.6656, "label": "cat", "box": {"xmin": 571, "ymin": 83, "xmax": 598, "ymax": 103}},
                {"score": 0.6614, "label": "couch", "box": {"xmin": 571, "ymin": 83, "xmax": 598, "ymax": 103}},
                {"score": 0.6456, "label": "remote", "box": {"xmin": 494, "ymin": 105, "xmax": 521, "ymax": 127}},
                {"score": 0.642, "label": "remote", "box": {"xmin": 67, "ymin": 274, "xmax": 93, "ymax": 297}},
                {"score": 0.6419, "label": "cat", "box": {"xmin": 494, "ymin": 105, "xmax": 521, "ymax": 127}},
            ],
        )

        outputs = object_detector(
            [
                {
                    "image": "./tests/fixtures/tests_samples/COCO/000000039769.png",
                    "candidate_labels": ["cat", "remote", "couch"],
                }
            ],
            threshold=0.64,
        )

        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                [
                    {"score": 0.7235, "label": "cat", "box": {"xmin": 204, "ymin": 167, "xmax": 232, "ymax": 190}},
                    {"score": 0.7218, "label": "remote", "box": {"xmin": 204, "ymin": 167, "xmax": 232, "ymax": 190}},
                    {"score": 0.7184, "label": "couch", "box": {"xmin": 204, "ymin": 167, "xmax": 232, "ymax": 190}},
                    {"score": 0.6748, "label": "remote", "box": {"xmin": 571, "ymin": 83, "xmax": 598, "ymax": 103}},
                    {"score": 0.6656, "label": "cat", "box": {"xmin": 571, "ymin": 83, "xmax": 598, "ymax": 103}},
                    {"score": 0.6614, "label": "couch", "box": {"xmin": 571, "ymin": 83, "xmax": 598, "ymax": 103}},
                    {"score": 0.6456, "label": "remote", "box": {"xmin": 494, "ymin": 105, "xmax": 521, "ymax": 127}},
                    {"score": 0.642, "label": "remote", "box": {"xmin": 67, "ymin": 274, "xmax": 93, "ymax": 297}},
                    {"score": 0.6419, "label": "cat", "box": {"xmin": 494, "ymin": 105, "xmax": 521, "ymax": 127}},
                ]
            ],
        )

    @require_torch
    @slow
    def test_large_model_pt(self):
        object_detector = pipeline("zero-shot-object-detection")

        outputs = object_detector(
            "http://images.cocodataset.org/val2017/000000039769.jpg",
            candidate_labels=["cat", "remote", "couch"],
        )
        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                {"score": 0.2868, "label": "cat", "box": {"xmin": 324, "ymin": 20, "xmax": 640, "ymax": 373}},
                {"score": 0.277, "label": "remote", "box": {"xmin": 40, "ymin": 72, "xmax": 177, "ymax": 115}},
                {"score": 0.2537, "label": "cat", "box": {"xmin": 1, "ymin": 55, "xmax": 315, "ymax": 472}},
                {"score": 0.1474, "label": "remote", "box": {"xmin": 335, "ymin": 74, "xmax": 371, "ymax": 187}},
                {"score": 0.1208, "label": "couch", "box": {"xmin": 4, "ymin": 0, "xmax": 642, "ymax": 476}},
            ],
        )

        outputs = object_detector(
            [
                {
                    "image": "http://images.cocodataset.org/val2017/000000039769.jpg",
                    "candidate_labels": ["cat", "remote", "couch"],
                },
                {
                    "image": "http://images.cocodataset.org/val2017/000000039769.jpg",
                    "candidate_labels": ["cat", "remote", "couch"],
                },
            ],
        )
        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                [
                    {"score": 0.2868, "label": "cat", "box": {"xmin": 324, "ymin": 20, "xmax": 640, "ymax": 373}},
                    {"score": 0.277, "label": "remote", "box": {"xmin": 40, "ymin": 72, "xmax": 177, "ymax": 115}},
                    {"score": 0.2537, "label": "cat", "box": {"xmin": 1, "ymin": 55, "xmax": 315, "ymax": 472}},
                    {"score": 0.1474, "label": "remote", "box": {"xmin": 335, "ymin": 74, "xmax": 371, "ymax": 187}},
                    {"score": 0.1208, "label": "couch", "box": {"xmin": 4, "ymin": 0, "xmax": 642, "ymax": 476}},
                ],
                [
                    {"score": 0.2868, "label": "cat", "box": {"xmin": 324, "ymin": 20, "xmax": 640, "ymax": 373}},
                    {"score": 0.277, "label": "remote", "box": {"xmin": 40, "ymin": 72, "xmax": 177, "ymax": 115}},
                    {"score": 0.2537, "label": "cat", "box": {"xmin": 1, "ymin": 55, "xmax": 315, "ymax": 472}},
                    {"score": 0.1474, "label": "remote", "box": {"xmin": 335, "ymin": 74, "xmax": 371, "ymax": 187}},
                    {"score": 0.1208, "label": "couch", "box": {"xmin": 4, "ymin": 0, "xmax": 642, "ymax": 476}},
                ],
            ],
        )

    @require_tf
    @unittest.skip("Zero Shot Object Detection not implemented in TF")
    def test_large_model_tf(self):
        pass

    @require_torch
    @slow
    def test_threshold(self):
        threshold = 0.2
        object_detector = pipeline("zero-shot-object-detection")

        outputs = object_detector(
            "http://images.cocodataset.org/val2017/000000039769.jpg",
            candidate_labels=["cat", "remote", "couch"],
            threshold=threshold,
        )
        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                {"score": 0.2868, "label": "cat", "box": {"xmin": 324, "ymin": 20, "xmax": 640, "ymax": 373}},
                {"score": 0.277, "label": "remote", "box": {"xmin": 40, "ymin": 72, "xmax": 177, "ymax": 115}},
                {"score": 0.2537, "label": "cat", "box": {"xmin": 1, "ymin": 55, "xmax": 315, "ymax": 472}},
            ],
        )

    @require_torch
    @slow
    def test_top_k(self):
        top_k = 2
        object_detector = pipeline("zero-shot-object-detection")

        outputs = object_detector(
            "http://images.cocodataset.org/val2017/000000039769.jpg",
            candidate_labels=["cat", "remote", "couch"],
            top_k=top_k,
        )
        self.assertEqual(
            nested_simplify(outputs, decimals=4),
            [
                {"score": 0.2868, "label": "cat", "box": {"xmin": 324, "ymin": 20, "xmax": 640, "ymax": 373}},
                {"score": 0.277, "label": "remote", "box": {"xmin": 40, "ymin": 72, "xmax": 177, "ymax": 115}},
            ],
        )
709
import copy
from collections import OrderedDict
from typing import Dict, Mapping

from packaging import version

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ..auto import CONFIG_MAPPING


logger = logging.get_logger(__name__)

DETR_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "facebook/detr-resnet-50": "https://huggingface.co/facebook/detr-resnet-50/resolve/main/config.json",
    # See all DETR models at https://huggingface.co/models?filter=detr
}


class DetrConfig(PretrainedConfig):
    """Configuration class to store the configuration of a DETR model.

    Instantiating a configuration with the defaults yields a configuration
    similar to `facebook/detr-resnet-50`.
    """

    model_type = "detr"
    keys_to_ignore_at_inference = ["past_key_values"]
    attribute_map = {
        "hidden_size": "d_model",
        "num_attention_heads": "encoder_attention_heads",
    }

    def __init__(
        self,
        use_timm_backbone=True,
        backbone_config=None,
        num_channels=3,
        num_queries=100,
        encoder_layers=6,
        encoder_ffn_dim=2048,
        encoder_attention_heads=8,
        decoder_layers=6,
        decoder_ffn_dim=2048,
        decoder_attention_heads=8,
        encoder_layerdrop=0.0,
        decoder_layerdrop=0.0,
        is_encoder_decoder=True,
        activation_function="relu",
        d_model=256,
        dropout=0.1,
        attention_dropout=0.0,
        activation_dropout=0.0,
        init_std=0.02,
        init_xavier_std=1.0,
        auxiliary_loss=False,
        position_embedding_type="sine",
        backbone="resnet50",
        use_pretrained_backbone=True,
        dilation=False,
        class_cost=1,
        bbox_cost=5,
        giou_cost=2,
        mask_loss_coefficient=1,
        dice_loss_coefficient=1,
        bbox_loss_coefficient=5,
        giou_loss_coefficient=2,
        eos_coefficient=0.1,
        **kwargs,
    ):
        if backbone_config is not None and use_timm_backbone:
            raise ValueError("You can't specify both `backbone_config` and `use_timm_backbone`.")

        if not use_timm_backbone:
            if backbone_config is None:
                logger.info("`backbone_config` is `None`. Initializing the config with the default `ResNet` backbone.")
                backbone_config = CONFIG_MAPPING["resnet"](out_features=["stage4"])
            elif isinstance(backbone_config, dict):
                backbone_model_type = backbone_config.get("model_type")
                config_class = CONFIG_MAPPING[backbone_model_type]
                backbone_config = config_class.from_dict(backbone_config)
            # set timm attributes to None
            dilation, backbone, use_pretrained_backbone = None, None, None

        self.use_timm_backbone = use_timm_backbone
        self.backbone_config = backbone_config
        self.num_channels = num_channels
        self.num_queries = num_queries
        self.d_model = d_model
        self.encoder_ffn_dim = encoder_ffn_dim
        self.encoder_layers = encoder_layers
        self.encoder_attention_heads = encoder_attention_heads
        self.decoder_ffn_dim = decoder_ffn_dim
        self.decoder_layers = decoder_layers
        self.decoder_attention_heads = decoder_attention_heads
        self.dropout = dropout
        self.attention_dropout = attention_dropout
        self.activation_dropout = activation_dropout
        self.activation_function = activation_function
        self.init_std = init_std
        self.init_xavier_std = init_xavier_std
        self.encoder_layerdrop = encoder_layerdrop
        self.decoder_layerdrop = decoder_layerdrop
        self.num_hidden_layers = encoder_layers
        self.auxiliary_loss = auxiliary_loss
        self.position_embedding_type = position_embedding_type
        self.backbone = backbone
        self.use_pretrained_backbone = use_pretrained_backbone
        self.dilation = dilation
        # Hungarian matcher
        self.class_cost = class_cost
        self.bbox_cost = bbox_cost
        self.giou_cost = giou_cost
        # Loss coefficients
        self.mask_loss_coefficient = mask_loss_coefficient
        self.dice_loss_coefficient = dice_loss_coefficient
        self.bbox_loss_coefficient = bbox_loss_coefficient
        self.giou_loss_coefficient = giou_loss_coefficient
        self.eos_coefficient = eos_coefficient
        super().__init__(is_encoder_decoder=is_encoder_decoder, **kwargs)

    @property
    def num_attention_heads(self) -> int:
        return self.encoder_attention_heads

    @property
    def hidden_size(self) -> int:
        return self.d_model

    @classmethod
    def from_backbone_config(cls, backbone_config, **kwargs) -> "DetrConfig":
        """Instantiate a `DetrConfig` from a pre-trained backbone model configuration."""
        return cls(backbone_config=backbone_config, **kwargs)

    def to_dict(self) -> Dict[str, any]:
        """Serialize this instance to a Python dictionary (backbone config nested as dict)."""
        output = copy.deepcopy(self.__dict__)
        if output["backbone_config"] is not None:
            output["backbone_config"] = self.backbone_config.to_dict()
        output["model_type"] = self.__class__.model_type
        return output


class DetrOnnxConfig(OnnxConfig):
    torch_onnx_minimum_version = version.parse("1.11")

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        return OrderedDict(
            [
                ("pixel_values", {0: "batch", 1: "num_channels", 2: "height", 3: "width"}),
                ("pixel_mask", {0: "batch"}),
            ]
        )

    @property
    def atol_for_validation(self) -> float:
        return 1e-5

    @property
    def default_onnx_opset(self) -> int:
        return 12
693
0
import argparse

import fairseq
import torch
from torch import nn

from transformers import (
    MBartaaTokenizer,
    MBartConfig,
    MBartForCausalLM,
    SpeechEncoderDecoderConfig,
    SpeechEncoderDecoderModel,
    WavaVecaConfig,
    WavaVecaFeatureExtractor,
    WavaVecaModel,
    logging,
)


logging.set_verbosity_info()
logger = logging.get_logger(__name__)

# fairseq state-dict key prefix -> HF wav2vec2 module path ("*" = layer index placeholder)
MAPPING = {
    "post_extract_proj": "feature_projection.projection",
    "encoder.pos_conv.0": "encoder.pos_conv_embed.conv",
    "self_attn.k_proj": "encoder.layers.*.attention.k_proj",
    "self_attn.v_proj": "encoder.layers.*.attention.v_proj",
    "self_attn.q_proj": "encoder.layers.*.attention.q_proj",
    "self_attn.out_proj": "encoder.layers.*.attention.out_proj",
    "self_attn_layer_norm": "encoder.layers.*.layer_norm",
    "fc1": "encoder.layers.*.feed_forward.intermediate_dense",
    "fc2": "encoder.layers.*.feed_forward.output_dense",
    "final_layer_norm": "encoder.layers.*.final_layer_norm",
    "encoder.layer_norm": "encoder.layer_norm",
    "w2v_model.layer_norm": "feature_projection.layer_norm",
    "quantizer.weight_proj": "quantizer.weight_proj",
    "quantizer.vars": "quantizer.codevectors",
    "project_q": "project_q",
    "final_proj": "project_hid",
    "w2v_encoder.proj": "lm_head",
    "mask_emb": "masked_spec_embed",
}
TOP_LEVEL_KEYS = [
    "lm_head",
    "quantizer.weight_proj",
    "quantizer.codevectors",
    "project_q",
    "project_hid",
]


def set_recursively(hf_pointer, key, value, full_name, weight_type):
    """Walk ``key`` (dot-separated) from ``hf_pointer`` and copy ``value`` into the
    matching parameter, asserting that the shapes agree first."""
    for attribute in key.split("."):
        hf_pointer = getattr(hf_pointer, attribute)

    if weight_type is not None:
        hf_shape = getattr(hf_pointer, weight_type).shape
    else:
        hf_shape = hf_pointer.shape

    assert hf_shape == value.shape, (
        f"Shape of hf {key + '.' + weight_type if weight_type is not None else ''} is {hf_shape}, but should be"
        f" {value.shape} for {full_name}"
    )

    if weight_type == "weight":
        hf_pointer.weight.data = value
    elif weight_type == "weight_g":
        hf_pointer.weight_g.data = value
    elif weight_type == "weight_v":
        hf_pointer.weight_v.data = value
    elif weight_type == "bias":
        hf_pointer.bias.data = value
    else:
        hf_pointer.data = value

    logger.info(f"""{key + '.' + weight_type if weight_type is not None else ''} was initialized from {full_name}.""")


def recursively_load_weights_wavaveca(fairseq_model, hf_model):
    """Copy every tensor of the fairseq wav2vec2 encoder into ``hf_model``,
    dispatching to the conv-feature-extractor / adapter / MAPPING loaders."""
    unused_weights = []
    fairseq_dict = fairseq_model.state_dict()

    feature_extractor = hf_model.feature_extractor
    adapter = hf_model.adapter

    for name, value in fairseq_dict.items():
        is_used = False
        if "conv_layers" in name:
            load_conv_layer(
                name,
                value,
                feature_extractor,
                unused_weights,
                hf_model.config.feat_extract_norm == "group",
            )
            is_used = True
        elif any(x in name for x in ["adaptor", "w2v_encoder.proj.", "w2v_proj_ln."]):
            load_adapter(name, value, adapter, unused_weights)
            is_used = True
        else:
            for key, mapped_key in MAPPING.items():
                if key in name or key.split("w2v_model.")[-1] == name.split(".")[0]:
                    is_used = True
                    if "*" in mapped_key:
                        # the segment before the matched key ends in the layer index
                        layer_index = name.split(key)[0].split(".")[-2]
                        mapped_key = mapped_key.replace("*", layer_index)
                    if "weight_g" in name:
                        weight_type = "weight_g"
                    elif "weight_v" in name:
                        weight_type = "weight_v"
                    elif "bias" in name:
                        weight_type = "bias"
                    elif "weight" in name:
                        weight_type = "weight"
                    else:
                        weight_type = None
                    set_recursively(hf_model, mapped_key, value, name, weight_type)
                continue
        if not is_used:
            unused_weights.append(name)

    logger.warning(f"""Unused weights: {unused_weights}""")


def load_conv_layer(full_name, value, feature_extractor, unused_weights, use_group_norm):
    """Copy one conv feature-extractor tensor; type_id 0 = conv, type_id 2 = layer norm."""
    name = full_name.split("conv_layers.")[-1]
    items = name.split(".")
    layer_id = int(items[0])
    type_id = int(items[1])

    if type_id == 0:
        if "bias" in name:
            assert value.shape == feature_extractor.conv_layers[layer_id].conv.bias.data.shape, (
                f"""{full_name} has size {value.shape}, but"""
                f""" {feature_extractor.conv_layers[layer_id].conv.bias.data.shape} was found."""
            )
            feature_extractor.conv_layers[layer_id].conv.bias.data = value
            logger.info(f"""Feat extract conv layer {layer_id} was initialized from {full_name}.""")
        elif "weight" in name:
            assert value.shape == feature_extractor.conv_layers[layer_id].conv.weight.data.shape, (
                f"""{full_name} has size {value.shape}, but"""
                f""" {feature_extractor.conv_layers[layer_id].conv.weight.data.shape} was found."""
            )
            feature_extractor.conv_layers[layer_id].conv.weight.data = value
            logger.info(f"""Feat extract conv layer {layer_id} was initialized from {full_name}.""")
    elif (type_id == 2 and not use_group_norm) or (type_id == 2 and layer_id == 0 and use_group_norm):
        if "bias" in name:
            assert value.shape == feature_extractor.conv_layers[layer_id].layer_norm.bias.data.shape, (
                f"""{full_name} has size {value.shape}, but {feature_extractor[layer_id].layer_norm.bias.data.shape} was"""
                " found."
            )
            feature_extractor.conv_layers[layer_id].layer_norm.bias.data = value
            logger.info(f"""Feat extract layer norm weight of layer {layer_id} was initialized from {full_name}.""")
        elif "weight" in name:
            assert value.shape == feature_extractor.conv_layers[layer_id].layer_norm.weight.data.shape, (
                f"""{full_name} has size {value.shape}, but"""
                f""" {feature_extractor[layer_id].layer_norm.weight.data.shape} was found."""
            )
            feature_extractor.conv_layers[layer_id].layer_norm.weight.data = value
            logger.info(f"""Feat extract layer norm weight of layer {layer_id} was initialized from {full_name}.""")
    else:
        unused_weights.append(full_name)


def load_adapter(full_name, value, adapter, unused_weights):
    """Copy one adapter tensor (projection, projection layer norm, or conv layer)."""
    name = full_name.split("adaptor.")[-1]
    items = name.split(".")
    if items[1].isdigit():
        layer_id = int(items[1])
    else:
        layer_id = None

    if "adaptor" not in full_name:
        if "proj_ln" in full_name:
            # has to be layer norm
            if "bias" in name:
                assert (
                    value.shape == adapter.proj_layer_norm.bias.data.shape
                ), f"""{full_name} has size {value.shape}, but {adapter.proj_layer_norm.bias.data.shape} was found."""
                adapter.proj_layer_norm.bias.data = value
                logger.info(f"""Adapter proj layer norm bias was initialized from {full_name}.""")
            if "weight" in name:
                assert (
                    value.shape == adapter.proj_layer_norm.weight.data.shape
                ), f"""{full_name} has size {value.shape}, but {adapter.proj_layer_norm.weight.data.shape} was found."""
                adapter.proj_layer_norm.weight.data = value
        else:
            # has to be projection layer
            if "bias" in name:
                assert (
                    value.shape == adapter.proj.bias.data.shape
                ), f"""{full_name} has size {value.shape}, but {adapter.proj.bias.data.shape} was found."""
                adapter.proj.bias.data = value
                logger.info(f"""Adapter proj layer bias was initialized from {full_name}.""")
            if "weight" in name:
                assert (
                    value.shape == adapter.proj.weight.data.shape
                ), f"""{full_name} has size {value.shape}, but {adapter.proj.weight.data.shape} was found."""
                adapter.proj.weight.data = value
                logger.info(f"""Adapter proj layer weight was initialized from {full_name}.""")
    elif isinstance(layer_id, int):
        if "bias" in name:
            assert (
                value.shape == adapter.layers[layer_id].conv.bias.data.shape
            ), f"""{full_name} has size {value.shape}, but {adapter.layers[layer_id].conv.bias.data.shape} was found."""
            adapter.layers[layer_id].conv.bias.data = value
            logger.info(f"""Adapter layer {layer_id} bias was initialized from {full_name}.""")
        elif "weight" in name:
            assert (
                value.shape == adapter.layers[layer_id].conv.weight.data.shape
            ), f"""{full_name} has size {value.shape}, but {adapter.layers[layer_id].conv.weight.data.shape} was found."""
            adapter.layers[layer_id].conv.weight.data = value
            logger.info(f"""Adapter layer {layer_id} bias was initialized from {full_name}.""")
    else:
        unused_weights.append(full_name)


def make_linear_from_emb(emb):
    """Build an untied LM head whose weight shares storage with ``emb``."""
    vocab_size, emb_size = emb.weight.shape
    lin_layer = nn.Linear(vocab_size, emb_size, bias=False)
    lin_layer.weight.data = emb.weight.data
    return lin_layer


@torch.no_grad()
def convert_wavaveca_checkpoint(
    checkpoint_path,
    pytorch_dump_folder_path,
    dict_path,
    config_yaml_path,
    encoder_config_path,
    decoder_config_path,
    add_adapter,
    adapter_kernel_size,
    adapter_stride,
    decoder_start_token_id,
    encoder_output_dim,
):
    """
    Copy/paste/tweak a fairseq wav2vec2+mBART seq2seq checkpoint into a HF
    SpeechEncoderDecoderModel and save model, tokenizer and feature extractor
    to ``pytorch_dump_folder_path``.
    """
    # load configs
    encoder_config = WavaVecaConfig.from_pretrained(
        encoder_config_path,
        add_adapter=add_adapter,
        adapter_stride=adapter_stride,
        adapter_kernel_size=adapter_kernel_size,
        use_auth_token=True,  # NOTE(review): original argument value was obfuscated — confirm
        output_hidden_size=encoder_output_dim,
    )
    decoder_config = MBartConfig.from_pretrained(decoder_config_path)

    # load model
    model, _, _ = fairseq.checkpoint_utils.load_model_ensemble_and_task(
        [checkpoint_path],
        arg_overrides={
            "config_yaml": config_yaml_path,
            "data": "/".join(dict_path.split("/")[:-1]),
            "w2v_path": checkpoint_path,
            "load_pretrained_decoder_from": None,
        },
    )
    model = model[0].eval()

    # load feature extractor
    feature_extractor = WavaVecaFeatureExtractor.from_pretrained(encoder_config_path, use_auth_token=True)

    # set weights for wav2vec2 encoder
    hf_encoder = WavaVecaModel(encoder_config)

    recursively_load_weights_wavaveca(model.encoder, hf_encoder)

    # load decoder weights
    hf_decoder = MBartForCausalLM(decoder_config)
    missing_keys, unexpected_keys = hf_decoder.model.decoder.load_state_dict(model.decoder.state_dict(), strict=False)
    logger.warning(f"""The following keys are missing when loading the decoder weights: {missing_keys}""")
    logger.warning(f"""The following keys are unexpected when loading the decoder weights: {unexpected_keys}""")

    hf_wavavec = SpeechEncoderDecoderModel(encoder=hf_encoder, decoder=hf_decoder)
    # NOTE(review): the target of this assignment was obfuscated; by convention this
    # disables encoder/decoder embedding tying — confirm against upstream script.
    hf_wavavec.config.tie_word_embeddings = False

    tokenizer = MBartaaTokenizer(dict_path)
    tokenizer.save_pretrained(pytorch_dump_folder_path)

    # NOTE(review): the config keys below were obfuscated in the source; they are
    # reconstructed from the assigned values — confirm against the upstream script.
    config = hf_wavavec.config.to_dict()
    config["pad_token_id"] = tokenizer.pad_token_id
    config["bos_token_id"] = tokenizer.bos_token_id
    config["eos_token_id"] = tokenizer.eos_token_id
    config["tokenizer_class"] = "mbart50"
    config["feature_extractor_type"] = "wav2vec2"
    config["decoder_start_token_id"] = tokenizer.eos_token_id
    config["forced_bos_token_id"] = 250004
    config["forced_eos_token_id"] = tokenizer.eos_token_id

    hf_wavavec.config = SpeechEncoderDecoderConfig.from_dict(config)

    hf_wavavec.save_pretrained(pytorch_dump_folder_path)
    feature_extractor.save_pretrained(pytorch_dump_folder_path)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument('--pytorch_dump_folder_path', default=None, type=str, help='Path to the output PyTorch model.')
    parser.add_argument('--checkpoint_path', default=None, type=str, help='Path to fairseq checkpoint')
    parser.add_argument('--dict_path', default=None, type=str, help='Path to dict of fine-tuned model')
    parser.add_argument('--config_yaml_path', default=None, type=str, help='Path to yaml file of fine-tuned model')
    parser.add_argument(
        '--encoder_config_path',
        default='facebook/wav2vec2-xls-r-1b',
        type=str,
        help='Path to hf encoder wav2vec2 checkpoint config',
    )
    parser.add_argument(
        '--decoder_config_path',
        default='facebook/mbart-large-50-one-to-many-mmt',
        type=str,
        help='Path to hf decoder checkpoint config',
    )
    parser.add_argument('--add_adapter', default=True, type=bool, help='whethere to add model adapter layers')
    parser.add_argument('--adapter_stride', default=2, type=int, help='stride of adapter layers')
    parser.add_argument('--adapter_kernel_size', default=3, type=int, help='kernel size of adapter layers')
    parser.add_argument('--encoder_output_dim', default=1_024, type=int, help='encoder output dim')
    parser.add_argument('--start_token_id', default=250_004, type=int, help='`decoder_start_token_id` of model config')

    args = parser.parse_args()
    convert_wavaveca_checkpoint(
        args.checkpoint_path,
        args.pytorch_dump_folder_path,
        args.dict_path,
        args.config_yaml_path,
        encoder_config_path=args.encoder_config_path,
        decoder_config_path=args.decoder_config_path,
        add_adapter=args.add_adapter,
        adapter_kernel_size=args.adapter_kernel_size,
        adapter_stride=args.adapter_stride,
        decoder_start_token_id=args.start_token_id,
        encoder_output_dim=args.encoder_output_dim,
    )
710
import json
import logging
import os
import sys
from pathlib import Path

import finetune_rag

from transformers.file_utils import is_apex_available
from transformers.testing_utils import (
    TestCasePlus,
    execute_subprocess_async,
    require_ray,
    require_torch_gpu,
    require_torch_multi_gpu,
)


logging.basicConfig(level=logging.DEBUG)

logger = logging.getLogger()
stream_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stream_handler)


class _a(TestCasePlus):
    """End-to-end integration tests for the RAG fine-tuning example script."""

    def _create_dummy_data(self, data_dir):
        # Write tiny train/val/test source/target files so finetune_rag can run.
        os.makedirs(data_dir, exist_ok=True)
        contents = {"source": "What is love ?", "target": "life"}
        n_lines = {"train": 12, "val": 2, "test": 2}
        for split in ["train", "test", "val"]:
            for field in ["source", "target"]:
                content = "\n".join([contents[field]] * n_lines[split])
                with open(os.path.join(data_dir, f"""{split}.{field}"""), "w") as f:
                    f.write(content)

    def _run_finetune(self, gpus, distributed_retriever="pytorch"):
        """Run finetune_rag in a subprocess and return the metrics.json contents."""
        tmp_dir = self.get_auto_remove_tmp_dir()
        output_dir = os.path.join(tmp_dir, "output")
        data_dir = os.path.join(tmp_dir, "data")
        self._create_dummy_data(data_dir=data_dir)

        testargs = f"""
            --data_dir {data_dir} \
            --output_dir {output_dir} \
            --model_name_or_path facebook/rag-sequence-base \
            --model_type rag_sequence \
            --do_train \
            --do_predict \
            --n_val -1 \
            --val_check_interval 1.0 \
            --train_batch_size 2 \
            --eval_batch_size 1 \
            --max_source_length 25 \
            --max_target_length 25 \
            --val_max_target_length 25 \
            --test_max_target_length 25 \
            --label_smoothing 0.1 \
            --dropout 0.1 \
            --attention_dropout 0.1 \
            --weight_decay 0.001 \
            --adam_epsilon 1e-08 \
            --max_grad_norm 0.1 \
            --lr_scheduler polynomial \
            --learning_rate 3e-04 \
            --num_train_epochs 1 \
            --warmup_steps 4 \
            --gradient_accumulation_steps 1 \
            --distributed-port 8787 \
            --use_dummy_dataset 1 \
            --distributed_retriever {distributed_retriever} \
        """.split()

        if gpus > 0:
            testargs.append(f"""--gpus={gpus}""")
            if is_apex_available():
                testargs.append("--fp16")
        else:
            testargs.append("--gpus=0")
            testargs.append("--distributed_backend=ddp_cpu")
            testargs.append("--num_processes=2")

        cmd = [sys.executable, str(Path(finetune_rag.__file__).resolve())] + testargs
        execute_subprocess_async(cmd, env=self.get_env())

        metrics_save_path = os.path.join(output_dir, "metrics.json")
        with open(metrics_save_path) as f:
            result = json.load(f)
        return result

    @require_torch_gpu
    def test_finetune_gpu(self):
        result = self._run_finetune(gpus=1)
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_multi_gpu
    def test_finetune_multigpu(self):
        result = self._run_finetune(gpus=2)
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_gpu
    @require_ray
    def test_finetune_gpu_ray_retriever(self):
        result = self._run_finetune(gpus=1, distributed_retriever="ray")
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_multi_gpu
    @require_ray
    def test_finetune_multigpu_ray_retriever(self):
        # NOTE(review): gpus=1 here matches the obfuscated source — confirm upstream.
        result = self._run_finetune(gpus=1, distributed_retriever="ray")
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)
693
0
def floyd(n):
    """Print the upper half of the diamond: rows of 1..n stars, right-aligned.

    :param n: number of star rows to print.
    """
    for i in range(0, n):
        for _ in range(0, n - i - 1):  # printing spaces
            print(" ", end="")
        for _ in range(0, i + 1):  # printing stars
            print("* ", end="")
        print()


def reverse_floyd(n):
    """Print the lower half of the diamond: rows of n..1 stars, with the
    left-padding for the *next* row emitted after each newline.

    :param n: number of star rows to print.
    """
    for i in range(n, 0, -1):
        for _ in range(i, 0, -1):  # printing stars
            print("* ", end="")
        print()
        for _ in range(n - i + 1, 0, -1):  # printing spaces
            print(" ", end="")


def pretty_print(n):
    """Print the full star diamond of height 2*n, or a notice for n <= 0."""
    if n <= 0:
        print(" ... .... nothing printing :(")
        return
    floyd(n)  # upper half
    reverse_floyd(n)  # lower half


if __name__ == "__main__":
    print(r'| /\ | |- | |- |--| |\ /| |-')
    print(r'|/ \| |- |_ |_ |__| | \/ | |_')
    K = 1
    while K:
        user_number = int(input('enter the number and , and see the magic : '))
        print()
        pretty_print(user_number)
        K = int(input('press 0 to exit... and 1 to continue...'))
    print('Good Bye...')
711
from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    'transfo-xl-wt103': 'https://huggingface.co/transfo-xl-wt103/resolve/main/config.json',
}


class _a(PretrainedConfig):
    """Configuration class for the Transformer-XL model.

    Holds the adaptive-softmax cutoffs, model dimensions and initialization
    hyper-parameters; see the parameter list of ``__init__`` for details.
    """

    model_type = "transfo-xl"
    keys_to_ignore_at_inference = ["mems"]
    attribute_map = {
        "n_token": "vocab_size",
        "hidden_size": "d_model",
        "num_attention_heads": "n_head",
        "num_hidden_layers": "n_layer",
    }

    def __init__(
        self,
        vocab_size=267735,
        cutoffs=[20000, 40000, 200000],
        d_model=1024,
        d_embed=1024,
        n_head=16,
        d_head=64,
        d_inner=4096,
        div_val=4,
        pre_lnorm=False,
        n_layer=18,
        mem_len=1600,
        clamp_len=1000,
        same_length=True,
        proj_share_all_but_first=True,
        attn_type=0,
        sample_softmax=-1,
        adaptive=True,
        dropout=0.1,
        dropatt=0.0,
        untie_r=True,
        init="normal",
        init_range=0.0_1,
        proj_init_std=0.0_1,
        init_std=0.0_2,
        layer_norm_epsilon=1e-5,
        eos_token_id=0,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.cutoffs = []
        self.cutoffs.extend(cutoffs)
        # First projection is never tied; remaining clusters are tied only when
        # proj_share_all_but_first is set.
        if proj_share_all_but_first:
            self.tie_projs = [False] + [True] * len(self.cutoffs)
        else:
            self.tie_projs = [False] + [False] * len(self.cutoffs)
        self.d_model = d_model
        self.d_embed = d_embed
        self.d_head = d_head
        self.d_inner = d_inner
        self.div_val = div_val
        self.pre_lnorm = pre_lnorm
        self.n_layer = n_layer
        self.n_head = n_head
        self.mem_len = mem_len
        self.same_length = same_length
        self.attn_type = attn_type
        self.clamp_len = clamp_len
        self.sample_softmax = sample_softmax
        self.adaptive = adaptive
        self.dropout = dropout
        self.dropatt = dropatt
        self.untie_r = untie_r
        self.init = init
        self.init_range = init_range
        self.proj_init_std = proj_init_std
        self.init_std = init_std
        self.layer_norm_epsilon = layer_norm_epsilon
        super().__init__(eos_token_id=eos_token_id, **kwargs)

    @property
    def max_position_embeddings(self):
        # Message copied from Transformer-XL documentation
        logger.info(f"""The model {self.model_type} is one of the few models that has no sequence length limit.""")
        return -1

    @max_position_embeddings.setter
    def max_position_embeddings(self, value):
        # Message copied from Transformer-XL documentation
        raise NotImplementedError(
            f"""The model {self.model_type} is one of the few models that has no sequence length limit."""
        )
693
0
from unittest import TestCase

from datasets import Sequence, Value
from datasets.arrow_dataset import Dataset


class _a(TestCase):
    """Unit tests for ``Dataset.from_list``."""

    def _create_example_records(self):
        # Four records sharing the same two columns.
        return [
            {"col_1": 3, "col_2": "a"},
            {"col_1": 2, "col_2": "b"},
            {"col_1": 1, "col_2": "c"},
            {"col_1": 0, "col_2": "d"},
        ]

    def _create_example_dataset(self):
        # Column-oriented equivalent of _create_example_records.
        data = {"col_1": [3, 2, 1, 0], "col_2": ["a", "b", "c", "d"]}
        return Dataset.from_dict(data)

    def test_create(self):
        example_records = self._create_example_records()
        dset = Dataset.from_list(example_records)
        self.assertListEqual(dset.column_names, ["col_1", "col_2"])
        for i, r in enumerate(dset):
            self.assertDictEqual(r, example_records[i])

    def test_list_dict_equivalent(self):
        example_records = self._create_example_records()
        dset = Dataset.from_list(example_records)
        dset_from_dict = Dataset.from_dict({k: [r[k] for r in example_records] for k in example_records[0]})
        self.assertEqual(dset.info, dset_from_dict.info)

    def test_uneven_records(self):  # checks what happens with missing columns
        example_records = [{"col_1": 1}, {"col_2": "x"}]
        dset = Dataset.from_list(example_records)
        self.assertDictEqual(dset[0], {"col_1": 1})
        self.assertDictEqual(dset[1], {"col_1": None})  # NB: first record is used for columns

    def test_variable_list_records(self):  # checks if the type can be inferred from the second record
        example_records = [{"col_1": []}, {"col_1": [1, 2]}]
        dset = Dataset.from_list(example_records)
        self.assertEqual(dset.info.features["col_1"], Sequence(Value("int64")))

    def test_create_empty(self):
        dset = Dataset.from_list([])
        self.assertEqual(len(dset), 0)
        self.assertListEqual(dset.column_names, [])
import torch

from diffusers import DiffusionPipeline


class _a(DiffusionPipeline):
    """Minimal pipeline that runs a single UNet forward + scheduler step on a
    random sample, returning a tensor of ones shaped like the model output."""

    def __init__(self, unet, scheduler):
        super().__init__()
        self.register_modules(unet=unet, scheduler=scheduler)

    def __call__(self):
        # Random input matching the UNet's configured channel count and sample size.
        image = torch.randn(
            (1, self.unet.config.in_channels, self.unet.config.sample_size, self.unet.config.sample_size),
        )
        timestep = 1

        model_output = self.unet(image, timestep).sample
        scheduler_output = self.scheduler.step(model_output, timestep, image).prev_sample

        # scheduler_output - scheduler_output cancels to zero; the call therefore
        # returns ones while still exercising the scheduler step.
        result = scheduler_output - scheduler_output + torch.ones_like(model_output)

        return result
693
0
import os
from typing import Any, Callable, Dict, List, Optional, Tuple, Union

import torch
from torch import nn

from ...models.controlnet import ControlNetModel, ControlNetOutput
from ...models.modeling_utils import ModelMixin
from ...utils import logging


logger = logging.get_logger(__name__)


class _a(ModelMixin):
    """Wraps several ControlNets and sums their residuals during the forward pass."""

    def __init__(self, controlnets):
        super().__init__()
        self.nets = nn.ModuleList(controlnets)

    def forward(
        self,
        sample,
        timestep,
        encoder_hidden_states,
        controlnet_cond,
        conditioning_scale,
        class_labels=None,
        timestep_cond=None,
        attention_mask=None,
        cross_attention_kwargs=None,
        guess_mode=False,
        return_dict=True,
    ):
        """Run every ControlNet on its own conditioning image/scale and
        accumulate the down-block and mid-block residuals element-wise."""
        for i, (image, scale, controlnet) in enumerate(zip(controlnet_cond, conditioning_scale, self.nets)):
            down_samples, mid_sample = controlnet(
                sample,
                timestep,
                encoder_hidden_states,
                image,
                scale,
                class_labels,
                timestep_cond,
                attention_mask,
                cross_attention_kwargs,
                guess_mode,
                return_dict,
            )

            # merge samples
            if i == 0:
                down_block_res_samples, mid_block_res_sample = down_samples, mid_sample
            else:
                down_block_res_samples = [
                    samples_prev + samples_curr
                    for samples_prev, samples_curr in zip(down_block_res_samples, down_samples)
                ]
                mid_block_res_sample += mid_sample

        return down_block_res_samples, mid_block_res_sample

    def save_pretrained(
        self,
        save_directory,
        is_main_process=True,
        save_function=None,
        safe_serialization=False,
        variant=None,
    ):
        """Save each sub-ControlNet under save_directory, save_directory_1, …"""
        idx = 0
        model_path_to_save = save_directory
        for controlnet in self.nets:
            controlnet.save_pretrained(
                model_path_to_save,
                is_main_process=is_main_process,
                save_function=save_function,
                safe_serialization=safe_serialization,
                variant=variant,
            )

            idx += 1
            model_path_to_save = model_path_to_save + f"""_{idx}"""

    @classmethod
    def from_pretrained(cls, pretrained_model_path, **kwargs):
        idx = 0
        controlnets = []

        # load controlnet and append to list until no controlnet directory exists anymore
        # first controlnet has to be saved under `./mydirectory/controlnet` to be compliant with `DiffusionPipeline.from_prertained`
        # second, third, ... controlnets have to be saved under `./mydirectory/controlnet_1`, `./mydirectory/controlnet_2`, ...
        model_path_to_load = pretrained_model_path
        while os.path.isdir(model_path_to_load):
            controlnet = ControlNetModel.from_pretrained(model_path_to_load, **kwargs)
            controlnets.append(controlnet)

            idx += 1
            model_path_to_load = pretrained_model_path + f"""_{idx}"""

        logger.info(f"""{len(controlnets)} controlnets loaded from {pretrained_model_path}.""")

        if len(controlnets) == 0:
            raise ValueError(
                f"""No ControlNets found under {os.path.dirname(pretrained_model_path)}. Expected at least {pretrained_model_path + '_0'}."""
            )

        return cls(controlnets)
713
import pytest
import requests

from datasets.utils.file_utils import http_head

from .utils import OfflineSimulationMode, RequestWouldHangIndefinitelyError, offline


@pytest.mark.integration
def test_offline_with_timeout():
    """Simulated hanging connections raise the sentinel error (no timeout) or
    ConnectTimeout (with timeout)."""
    with offline(OfflineSimulationMode.CONNECTION_TIMES_OUT):
        with pytest.raises(RequestWouldHangIndefinitelyError):
            requests.request("GET", "https://huggingface.co")
        with pytest.raises(requests.exceptions.ConnectTimeout):
            requests.request("GET", "https://huggingface.co", timeout=1.0)


@pytest.mark.integration
def test_offline_with_connection_error():
    """Simulated connection failures surface as ConnectionError."""
    with offline(OfflineSimulationMode.CONNECTION_FAILS):
        with pytest.raises(requests.exceptions.ConnectionError):
            requests.request("GET", "https://huggingface.co")


def test_offline_with_datasets_offline_mode_enabled():
    """HF_DATASETS_OFFLINE=1 makes http_head refuse to hit the network.

    NOTE(review): the expected exception was obfuscated in the source; builtin
    ConnectionError matches datasets' offline-mode behavior — confirm upstream.
    """
    with offline(OfflineSimulationMode.HF_DATASETS_OFFLINE_SET_TO_1):
        with pytest.raises(ConnectionError):
            http_head("https://huggingface.co")
693
0
import argparse
import json
import os

import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed

from accelerate import Accelerator, DistributedType
from accelerate.utils.deepspeed import DummyOptim, DummyScheduler


MAX_GPU_BATCH_SIZE = 16
EVAL_BATCH_SIZE = 32


def get_dataloaders(accelerator, batch_size=16, model_name="bert-base-cased"):
    """Build train/eval dataloaders for GLUE MRPC tokenized with `model_name`'s tokenizer.

    Returns (train_dataloader, eval_dataloader).
    """
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    datasets = load_dataset("glue", "mrpc")

    def tokenize_function(examples):
        # max_length=None => use the model max length (it's actually the default)
        outputs = tokenizer(examples["sentence1"], examples["sentence2"], truncation=True, max_length=None)
        return outputs

    # Apply the method we just defined to all the examples in all the splits of the dataset
    tokenized_datasets = datasets.map(
        tokenize_function, batched=True, remove_columns=["idx", "sentence1", "sentence2"], load_from_cache_file=False
    )

    # We also rename the 'label' column to 'labels' which is the expected name for labels by the models of the
    # transformers library
    tokenized_datasets = tokenized_datasets.rename_column("label", "labels")

    def collate_fn(examples):
        # On TPU it's best to pad everything to the same length or training will be very slow.
        if accelerator.distributed_type == DistributedType.TPU:
            return tokenizer.pad(examples, padding="max_length", max_length=128, return_tensors="pt")
        return tokenizer.pad(examples, padding="longest", return_tensors="pt")

    # Instantiate dataloaders.
    train_dataloader = DataLoader(
        tokenized_datasets["train"], shuffle=True, collate_fn=collate_fn, batch_size=batch_size
    )
    eval_dataloader = DataLoader(
        tokenized_datasets["validation"], shuffle=False, collate_fn=collate_fn, batch_size=EVAL_BATCH_SIZE
    )

    return train_dataloader, eval_dataloader


def evaluation_loop(accelerator, model, eval_dataloader, metric):
    """Run evaluation and return the accuracy, de-duplicating the padded tail
    of the last distributed batch."""
    model.eval()
    samples_seen = 0
    for step, batch in enumerate(eval_dataloader):
        # We could avoid this line since we set the accelerator with `device_placement=True`.
        batch.to(accelerator.device)
        with torch.no_grad():
            outputs = model(**batch)
        predictions = outputs.logits.argmax(dim=-1)
        # It is slightly faster to call this once, than multiple times
        predictions, references = accelerator.gather((predictions, batch["labels"]))
        # If we are in a multiprocess environment, the last batch has duplicates
        if accelerator.use_distributed:
            if step == len(eval_dataloader) - 1:
                predictions = predictions[: len(eval_dataloader.dataset) - samples_seen]
                references = references[: len(eval_dataloader.dataset) - samples_seen]
            else:
                samples_seen += references.shape[0]
        metric.add_batch(predictions=predictions, references=references)

    eval_metric = metric.compute()
    return eval_metric["accuracy"]


def training_function(config, args):
    """Train BERT on MRPC with optional DeepSpeed, saving a checkpoint (and a
    state_{epoch}.json summary) every epoch, and verifying those artifacts when
    resuming from a checkpoint."""
    accelerator = Accelerator()
    # Sample hyper-parameters for learning rate, batch size, seed and a few other HPs
    lr = config["lr"]
    num_epochs = int(config["num_epochs"])
    seed = int(config["seed"])
    batch_size = int(config["batch_size"])
    model_name = args.model_name_or_path

    set_seed(seed)
    train_dataloader, eval_dataloader = get_dataloaders(accelerator, batch_size, model_name)

    # Instantiate the model (we build the model here so that the seed also control new weights initialization)
    model = AutoModelForSequenceClassification.from_pretrained(model_name, return_dict=True)

    # Instantiate optimizer: a real AdamW unless DeepSpeed supplies its own optimizer,
    # in which case a DummyOptim placeholder is required by accelerate.
    optimizer_cls = (
        AdamW
        if accelerator.state.deepspeed_plugin is None
        or "optimizer" not in accelerator.state.deepspeed_plugin.deepspeed_config
        else DummyOptim
    )
    optimizer = optimizer_cls(params=model.parameters(), lr=lr)

    if accelerator.state.deepspeed_plugin is not None:
        gradient_accumulation_steps = accelerator.state.deepspeed_plugin.deepspeed_config[
            "gradient_accumulation_steps"
        ]
    else:
        gradient_accumulation_steps = 1
    max_training_steps = (len(train_dataloader) * num_epochs) // gradient_accumulation_steps

    # Instantiate scheduler (again a Dummy placeholder when DeepSpeed owns it)
    if (
        accelerator.state.deepspeed_plugin is None
        or "scheduler" not in accelerator.state.deepspeed_plugin.deepspeed_config
    ):
        lr_scheduler = get_linear_schedule_with_warmup(
            optimizer=optimizer, num_warmup_steps=0, num_training_steps=max_training_steps
        )
    else:
        lr_scheduler = DummyScheduler(optimizer, total_num_steps=max_training_steps, warmup_num_steps=0)

    # Prepare everything
    # There is no specific order to remember, we just need to unpack the objects in the same order we gave them to the
    # prepare method.
    model, optimizer, train_dataloader, eval_dataloader, lr_scheduler = accelerator.prepare(
        model, optimizer, train_dataloader, eval_dataloader, lr_scheduler
    )

    # We need to keep track of how many total steps we have iterated over
    overall_step = 0
    # We also need to keep track of the stating epoch so files are named properly
    starting_epoch = 0
    metric = evaluate.load("glue", "mrpc")
    ending_epoch = num_epochs

    if args.partial_train_epoch is not None:
        ending_epoch = args.partial_train_epoch

    if args.resume_from_checkpoint:
        accelerator.load_state(args.resume_from_checkpoint)
        # The checkpoint dir is named "....epoch_<N>"; parse N back out of it.
        epoch_string = args.resume_from_checkpoint.split("epoch_")[1]
        state_epoch_num = ""
        for char in epoch_string:
            if char.isdigit():
                state_epoch_num += char
            else:
                break
        starting_epoch = int(state_epoch_num) + 1
        accuracy = evaluation_loop(accelerator, model, eval_dataloader, metric)
        accelerator.print("resumed checkpoint performance:", accuracy)
        accelerator.print("resumed checkpoint's scheduler's lr:", lr_scheduler.get_lr()[0])
        accelerator.print("resumed optimizers's lr:", optimizer.param_groups[0]["lr"])
        with open(os.path.join(args.output_dir, f"state_{starting_epoch-1}.json"), "r") as f:
            resumed_state = json.load(f)
            assert resumed_state["accuracy"] == accuracy, "Accuracy mismatch, loading from checkpoint failed"
            assert (
                resumed_state["lr"] == lr_scheduler.get_lr()[0]
            ), "Scheduler learning rate mismatch, loading from checkpoint failed"
            assert (
                resumed_state["optimizer_lr"] == optimizer.param_groups[0]["lr"]
            ), "Optimizer learning rate mismatch, loading from checkpoint failed"
            assert resumed_state["epoch"] == starting_epoch - 1, "Epoch mismatch, loading from checkpoint failed"
        return

    # Now we train the model
    state = {}
    for epoch in range(starting_epoch, ending_epoch):
        model.train()
        for step, batch in enumerate(train_dataloader):
            outputs = model(**batch)
            loss = outputs.loss
            loss = loss / gradient_accumulation_steps
            accelerator.backward(loss)
            if step % gradient_accumulation_steps == 0:
                optimizer.step()
                lr_scheduler.step()
                optimizer.zero_grad()
            overall_step += 1

        output_dir = f"epoch_{epoch}"
        output_dir = os.path.join(args.output_dir, output_dir)
        accelerator.save_state(output_dir)
        accuracy = evaluation_loop(accelerator, model, eval_dataloader, metric)
        state["accuracy"] = accuracy
        state["lr"] = lr_scheduler.get_lr()[0]
        state["optimizer_lr"] = optimizer.param_groups[0]["lr"]
        state["epoch"] = epoch
        state["overall_step"] = overall_step
        accelerator.print(f"epoch {epoch}:", state)

        accelerator.wait_for_everyone()
        if accelerator.is_main_process:
            with open(os.path.join(args.output_dir, f"state_{epoch}.json"), "w") as f:
                json.dump(state, f)


def main():
    parser = argparse.ArgumentParser(description="Simple example of training script tracking peak GPU memory usage.")
    parser.add_argument(
        "--model_name_or_path",
        type=str,
        default="bert-base-cased",
        help="Path to pretrained model or model identifier from huggingface.co/models.",
        required=False,
    )
    parser.add_argument(
        "--output_dir",
        type=str,
        default=".",
        help="Optional save directory where all checkpoint folders will be stored. Default is the current working directory.",
    )
    parser.add_argument(
        "--resume_from_checkpoint",
        type=str,
        default=None,
        help="If the training should continue from a checkpoint folder.",
    )
    parser.add_argument(
        "--partial_train_epoch",
        type=int,
        default=None,
        help="If passed, the training will stop after this number of epochs.",
    )
    parser.add_argument(
        "--num_epochs",
        type=int,
        default=2,
        help="Number of train epochs.",
    )
    args = parser.parse_args()
    config = {"lr": 2e-5, "num_epochs": args.num_epochs, "seed": 42, "batch_size": 16}
    training_function(config, args)


if __name__ == "__main__":
    main()
714
import json
import pathlib
import unittest

import numpy as np

from transformers.testing_utils import require_torch, require_vision, slow
from transformers.utils import is_torch_available, is_vision_available

from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs


if is_torch_available():
    import torch

if is_vision_available():
    from PIL import Image

    from transformers import DetrImageProcessor


class DetrImageProcessingTester(unittest.TestCase):
    """Helper that holds the image-processor config under test and computes the
    expected post-resize image sizes."""

    def __init__(
        self,
        parent,
        batch_size=7,
        num_channels=3,
        min_resolution=30,
        max_resolution=400,
        do_resize=True,
        size=None,
        do_rescale=True,
        rescale_factor=1 / 255,
        do_normalize=True,
        image_mean=[0.5, 0.5, 0.5],
        image_std=[0.5, 0.5, 0.5],
        do_pad=True,
    ):
        # by setting size["longest_edge"] > max_resolution we're effectively not testing this :p
        size = size if size is not None else {"shortest_edge": 18, "longest_edge": 1333}
        self.parent = parent
        self.batch_size = batch_size
        self.num_channels = num_channels
        self.min_resolution = min_resolution
        self.max_resolution = max_resolution
        self.do_resize = do_resize
        self.size = size
        self.do_rescale = do_rescale
        self.rescale_factor = rescale_factor
        self.do_normalize = do_normalize
        self.image_mean = image_mean
        self.image_std = image_std
        self.do_pad = do_pad

    def prepare_image_processor_dict(self):
        return {
            "do_resize": self.do_resize,
            "size": self.size,
            "do_rescale": self.do_rescale,
            "rescale_factor": self.rescale_factor,
            "do_normalize": self.do_normalize,
            "image_mean": self.image_mean,
            "image_std": self.image_std,
            "do_pad": self.do_pad,
        }

    def get_expected_values(self, image_inputs, batched=False):
        """Compute (height, width) the processor should produce.

        Unbatched: the shortest edge is resized to size["shortest_edge"],
        keeping the aspect ratio. Batched: per-image expectations padded to the
        max over the batch.
        """
        if not batched:
            image = image_inputs[0]
            if isinstance(image, Image.Image):
                w, h = image.size
            else:
                h, w = image.shape[1], image.shape[2]
            if w < h:
                expected_height = int(self.size["shortest_edge"] * h / w)
                expected_width = self.size["shortest_edge"]
            elif w > h:
                expected_height = self.size["shortest_edge"]
                expected_width = int(self.size["shortest_edge"] * w / h)
            else:
                expected_height = self.size["shortest_edge"]
                expected_width = self.size["shortest_edge"]
        else:
            expected_values = []
            for image in image_inputs:
                expected_height, expected_width = self.get_expected_values([image])
                expected_values.append((expected_height, expected_width))
            expected_height = max(expected_values, key=lambda item: item[0])[0]
            expected_width = max(expected_values, key=lambda item: item[1])[1]

        return expected_height, expected_width


@require_torch
@require_vision
class DetrImageProcessingTest(ImageProcessingSavingTestMixin, unittest.TestCase):
    image_processing_class = DetrImageProcessor if is_vision_available() else None

    def setUp(self):
        self.image_processor_tester = DetrImageProcessingTester(self)

    @property
    def image_processor_dict(self):
        return self.image_processor_tester.prepare_image_processor_dict()

    def test_image_processor_properties(self):
        image_processing = self.image_processing_class(**self.image_processor_dict)
        self.assertTrue(hasattr(image_processing, "image_mean"))
        self.assertTrue(hasattr(image_processing, "image_std"))
        self.assertTrue(hasattr(image_processing, "do_normalize"))
        self.assertTrue(hasattr(image_processing, "do_rescale"))
        self.assertTrue(hasattr(image_processing, "rescale_factor"))
        self.assertTrue(hasattr(image_processing, "do_resize"))
        self.assertTrue(hasattr(image_processing, "size"))
        self.assertTrue(hasattr(image_processing, "do_pad"))

    def test_image_processor_from_dict_with_kwargs(self):
        image_processor = self.image_processing_class.from_dict(self.image_processor_dict)
        self.assertEqual(image_processor.size, {"shortest_edge": 18, "longest_edge": 1333})
        self.assertEqual(image_processor.do_pad, True)

        # legacy kwargs (size/max_size/pad_and_return_pixel_mask) must still be honored
        image_processor = self.image_processing_class.from_dict(
            self.image_processor_dict, size=42, max_size=84, pad_and_return_pixel_mask=False
        )
        self.assertEqual(image_processor.size, {"shortest_edge": 42, "longest_edge": 84})
        self.assertEqual(image_processor.do_pad, False)

    def test_batch_feature(self):
        pass

    def test_call_pil(self):
        # Initialize image_processing
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random PIL images
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False)
        for image in image_inputs:
            self.assertIsInstance(image, Image.Image)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs)
        self.assertEqual(
            encoded_images.shape,
            (1, self.image_processor_tester.num_channels, expected_height, expected_width),
        )

        # Test batched
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs, batched=True)
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                expected_height,
                expected_width,
            ),
        )

    def test_call_numpy(self):
        # Initialize image_processing
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random numpy tensors
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False, numpify=True)
        for image in image_inputs:
            self.assertIsInstance(image, np.ndarray)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs)
        self.assertEqual(
            encoded_images.shape,
            (1, self.image_processor_tester.num_channels, expected_height, expected_width),
        )

        # Test batched
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs, batched=True)
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                expected_height,
                expected_width,
            ),
        )

    def test_call_pytorch(self):
        # Initialize image_processing
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random PyTorch tensors
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False, torchify=True)
        for image in image_inputs:
            self.assertIsInstance(image, torch.Tensor)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs)
        self.assertEqual(
            encoded_images.shape,
            (1, self.image_processor_tester.num_channels, expected_height, expected_width),
        )

        # Test batched
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs, batched=True)
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                expected_height,
                expected_width,
            ),
        )

    @slow
    def test_call_pytorch_with_coco_detection_annotations(self):
        # prepare image and target
        image = Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png")
        with open("./tests/fixtures/tests_samples/COCO/coco_annotations.txt", "r") as f:
            target = json.loads(f.read())

        target = {"image_id": 39769, "annotations": target}

        # encode them
        image_processing = DetrImageProcessor.from_pretrained("facebook/detr-resnet-50")
        encoding = image_processing(images=image, annotations=target, return_tensors="pt")

        # verify pixel values
        expected_shape = torch.Size([1, 3, 800, 1066])
        self.assertEqual(encoding["pixel_values"].shape, expected_shape)

        expected_slice = torch.tensor([0.2796, 0.3138, 0.3481])
        self.assertTrue(torch.allclose(encoding["pixel_values"][0, 0, 0, :3], expected_slice, atol=1e-4))

        # verify area
        expected_area = torch.tensor([5887.9600, 11250.2061, 489353.8438, 837122.7500, 147967.5156, 165732.3438])
        self.assertTrue(torch.allclose(encoding["labels"][0]["area"], expected_area))
        # verify boxes
        expected_boxes_shape = torch.Size([6, 4])
        self.assertEqual(encoding["labels"][0]["boxes"].shape, expected_boxes_shape)
        expected_boxes_slice = torch.tensor([0.5503, 0.2765, 0.0604, 0.2215])
        self.assertTrue(torch.allclose(encoding["labels"][0]["boxes"][0], expected_boxes_slice, atol=1e-3))
        # verify image_id
        expected_image_id = torch.tensor([39769])
        self.assertTrue(torch.allclose(encoding["labels"][0]["image_id"], expected_image_id))
        # verify is_crowd
        expected_is_crowd = torch.tensor([0, 0, 0, 0, 0, 0])
        self.assertTrue(torch.allclose(encoding["labels"][0]["iscrowd"], expected_is_crowd))
        # verify class_labels
        expected_class_labels = torch.tensor([75, 75, 63, 65, 17, 17])
        self.assertTrue(torch.allclose(encoding["labels"][0]["class_labels"], expected_class_labels))
        # verify orig_size
        expected_orig_size = torch.tensor([480, 640])
        self.assertTrue(torch.allclose(encoding["labels"][0]["orig_size"], expected_orig_size))
        # verify size
        expected_size = torch.tensor([800, 1066])
        self.assertTrue(torch.allclose(encoding["labels"][0]["size"], expected_size))

    @slow
    def test_call_pytorch_with_coco_panoptic_annotations(self):
        # prepare image, target and masks_path
        image = Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png")
        with open("./tests/fixtures/tests_samples/COCO/coco_panoptic_annotations.txt", "r") as f:
            target = json.loads(f.read())

        target = {"file_name": "000000039769.png", "image_id": 39769, "segments_info": target}

        masks_path = pathlib.Path("./tests/fixtures/tests_samples/COCO/coco_panoptic")

        # encode them
        image_processing = DetrImageProcessor.from_pretrained("facebook/detr-resnet-50-panoptic")
        encoding = image_processing(images=image, annotations=target, masks_path=masks_path, return_tensors="pt")

        # verify pixel values
        expected_shape = torch.Size([1, 3, 800, 1066])
        self.assertEqual(encoding["pixel_values"].shape, expected_shape)

        expected_slice = torch.tensor([0.2796, 0.3138, 0.3481])
        self.assertTrue(torch.allclose(encoding["pixel_values"][0, 0, 0, :3], expected_slice, atol=1e-4))

        # verify area
        expected_area = torch.tensor([147979.6875, 165527.0469, 484638.5938, 11292.9375, 5879.6562, 7634.1147])
        self.assertTrue(torch.allclose(encoding["labels"][0]["area"], expected_area))
        # verify boxes
        expected_boxes_shape = torch.Size([6, 4])
        self.assertEqual(encoding["labels"][0]["boxes"].shape, expected_boxes_shape)
        expected_boxes_slice = torch.tensor([0.2625, 0.5437, 0.4688, 0.8625])
        self.assertTrue(torch.allclose(encoding["labels"][0]["boxes"][0], expected_boxes_slice, atol=1e-3))
        # verify image_id
        expected_image_id = torch.tensor([39769])
        self.assertTrue(torch.allclose(encoding["labels"][0]["image_id"], expected_image_id))
        # verify is_crowd
        expected_is_crowd = torch.tensor([0, 0, 0, 0, 0, 0])
        self.assertTrue(torch.allclose(encoding["labels"][0]["iscrowd"], expected_is_crowd))
        # verify class_labels
        expected_class_labels = torch.tensor([17, 17, 63, 75, 75, 93])
        self.assertTrue(torch.allclose(encoding["labels"][0]["class_labels"], expected_class_labels))
        # verify masks
        expected_masks_sum = 822873
        self.assertEqual(encoding["labels"][0]["masks"].sum().item(), expected_masks_sum)
        # verify orig_size
        expected_orig_size = torch.tensor([480, 640])
        self.assertTrue(torch.allclose(encoding["labels"][0]["orig_size"], expected_orig_size))
        # verify size
        expected_size = torch.tensor([800, 1066])
        self.assertTrue(torch.allclose(encoding["labels"][0]["size"], expected_size))
693
0
import importlib import sys from argparse import REMAINDER, ArgumentParser from pathlib import Path import torch_xla.distributed.xla_multiprocessing as xmp def _UpperCAmelCase ( ): '''simple docstring''' lowerCAmelCase : Optional[int] = ArgumentParser( description=( """PyTorch TPU distributed training launch helper utility that will spawn up multiple distributed processes""" ) ) # Optional arguments for the launch helper parser.add_argument("""--num_cores""" ,type=__A ,default=1 ,help="""Number of TPU cores to use (1 or 8).""" ) # positional parser.add_argument( """training_script""" ,type=__A ,help=( """The full path to the single TPU training """ """program/script to be launched in parallel, """ """followed by all the arguments for the """ """training script""" ) ,) # rest from the training program parser.add_argument("""training_script_args""" ,nargs=__A ) return parser.parse_args() def _UpperCAmelCase ( ): '''simple docstring''' lowerCAmelCase : List[str] = parse_args() # Import training_script as a module. lowerCAmelCase : List[Any] = Path(args.training_script ) sys.path.append(str(script_fpath.parent.resolve() ) ) lowerCAmelCase : Optional[Any] = script_fpath.stem lowerCAmelCase : Dict = importlib.import_module(__A ) # Patch sys.argv lowerCAmelCase : Union[str, Any] = [args.training_script] + args.training_script_args + ["""--tpu_num_cores""", str(args.num_cores )] xmp.spawn(mod._mp_fn ,args=() ,nprocs=args.num_cores ) if __name__ == "__main__": main()
715
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Tuple = 0 while b > 0: if b & 1: res += a a += a b >>= 1 return res def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Dict = 0 while b > 0: if b & 1: lowerCAmelCase : Optional[int] = ((res % c) + (a % c)) % c a += a b >>= 1 return res
693
0
from __future__ import annotations lowerCAmelCase : List[str] =1.6_021e-19 # units = C def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,): '''simple docstring''' if (conductivity, electron_conc, mobility).count(0 ) != 1: raise ValueError("""You cannot supply more or less than 2 values""" ) elif conductivity < 0: raise ValueError("""Conductivity cannot be negative""" ) elif electron_conc < 0: raise ValueError("""Electron concentration cannot be negative""" ) elif mobility < 0: raise ValueError("""mobility cannot be negative""" ) elif conductivity == 0: return ( "conductivity", mobility * electron_conc * ELECTRON_CHARGE, ) elif electron_conc == 0: return ( "electron_conc", conductivity / (mobility * ELECTRON_CHARGE), ) else: return ( "mobility", conductivity / (electron_conc * ELECTRON_CHARGE), ) if __name__ == "__main__": import doctest doctest.testmod()
716
from math import factorial


class Dual:
    """Dual number for forward-mode automatic differentiation.

    `real` is the value; `duals[k]` is the coefficient of E^(k+1), where E is
    the nilpotent infinitesimal. `differentiate` seeds duals = [1] and reads
    the k-th derivative out of duals[k-1] * k!.
    """

    def __init__(self, real, rank):
        self.real = real
        if isinstance(rank, int):
            # seed with `rank` unit dual coefficients
            self.duals = [1] * rank
        else:
            # rank is already a coefficient list
            self.duals = rank

    def __repr__(self):
        return (
            f"{self.real}+"
            f"{'+'.join(str(dual) + 'E' + str(n + 1) for n, dual in enumerate(self.duals))}"
        )

    def reduce(self):
        """Drop trailing zero dual coefficients."""
        cur = self.duals.copy()
        while cur[-1] == 0:
            cur.pop(-1)
        return Dual(self.real, cur)

    def __add__(self, other):
        if not isinstance(other, Dual):
            # scalar: only the real part changes
            return Dual(self.real + other, self.duals)
        s_dual = self.duals.copy()
        o_dual = other.duals.copy()
        # pad the shorter coefficient list (with 1s, matching the unit seed)
        if len(s_dual) > len(o_dual):
            o_dual.extend([1] * (len(s_dual) - len(o_dual)))
        elif len(s_dual) < len(o_dual):
            s_dual.extend([1] * (len(o_dual) - len(s_dual)))
        new_duals = []
        for i in range(len(s_dual)):
            new_duals.append(s_dual[i] + o_dual[i])
        return Dual(self.real + other.real, new_duals)

    __radd__ = __add__

    def __sub__(self, other):
        return self + other * -1

    def __mul__(self, other):
        if not isinstance(other, Dual):
            # scalar multiplies every coefficient
            new_duals = []
            for i in self.duals:
                new_duals.append(i * other)
            return Dual(self.real * other, new_duals)
        # Cauchy-product of the two coefficient sequences, plus the
        # cross terms with each real part.
        new_duals = [0] * (len(self.duals) + len(other.duals) + 1)
        for i, item in enumerate(self.duals):
            for j, jtem in enumerate(other.duals):
                new_duals[i + j + 1] += item * jtem
        for k in range(len(self.duals)):
            new_duals[k] += self.duals[k] * other.real
        for index in range(len(other.duals)):
            new_duals[index] += other.duals[index] * self.real
        return Dual(self.real * other.real, new_duals)

    __rmul__ = __mul__

    def __truediv__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i / other)
            return Dual(self.real / other, new_duals)
        raise ValueError  # dividing by a Dual is not supported

    def __floordiv__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i // other)
            return Dual(self.real // other, new_duals)
        raise ValueError  # floor-dividing by a Dual is not supported

    def __pow__(self, n):
        if n < 0 or isinstance(n, float):
            raise ValueError("power must be a positive integer")
        if n == 0:
            return 1
        if n == 1:
            return self
        x = self
        for _ in range(n - 1):
            x *= self
        return x


def differentiate(func, position, order):
    """Return the `order`-th derivative of `func` at `position` via dual numbers.

    >>> differentiate(lambda x: x**2, 2, 2)
    2

    Raises:
        ValueError: if func is not callable, position is not a number, or
            order is not an int.
    """
    if not callable(func):
        raise ValueError("differentiate() requires a function as input for func")
    if not isinstance(position, (float, int)):
        raise ValueError("differentiate() requires a float as input for position")
    if not isinstance(order, int):
        raise ValueError("differentiate() requires an int as input for order")
    d = Dual(position, 1)
    result = func(d)
    if order == 0:
        return result.real
    # k-th derivative = coefficient of E^k times k!
    return result.duals[order - 1] * factorial(order)


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    def f(y):
        return y**2 * y**4

    print(differentiate(f, 9, 2))
693
0
import unittest import torch from torch import nn from diffusers.models.activations import get_activation class _a ( unittest.TestCase ): def _snake_case ( self ) -> Optional[Any]: lowerCAmelCase : List[str] = get_activation("""swish""" ) self.assertIsInstance(__lowerCamelCase , nn.SiLU ) self.assertEqual(act(torch.tensor(-100 , dtype=torch.floataa ) ).item() , 0 ) self.assertNotEqual(act(torch.tensor(-1 , dtype=torch.floataa ) ).item() , 0 ) self.assertEqual(act(torch.tensor(0 , dtype=torch.floataa ) ).item() , 0 ) self.assertEqual(act(torch.tensor(20 , dtype=torch.floataa ) ).item() , 20 ) def _snake_case ( self ) -> Optional[int]: lowerCAmelCase : Any = get_activation("""silu""" ) self.assertIsInstance(__lowerCamelCase , nn.SiLU ) self.assertEqual(act(torch.tensor(-100 , dtype=torch.floataa ) ).item() , 0 ) self.assertNotEqual(act(torch.tensor(-1 , dtype=torch.floataa ) ).item() , 0 ) self.assertEqual(act(torch.tensor(0 , dtype=torch.floataa ) ).item() , 0 ) self.assertEqual(act(torch.tensor(20 , dtype=torch.floataa ) ).item() , 20 ) def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase : Optional[Any] = get_activation("""mish""" ) self.assertIsInstance(__lowerCamelCase , nn.Mish ) self.assertEqual(act(torch.tensor(-200 , dtype=torch.floataa ) ).item() , 0 ) self.assertNotEqual(act(torch.tensor(-1 , dtype=torch.floataa ) ).item() , 0 ) self.assertEqual(act(torch.tensor(0 , dtype=torch.floataa ) ).item() , 0 ) self.assertEqual(act(torch.tensor(20 , dtype=torch.floataa ) ).item() , 20 ) def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase : Any = get_activation("""gelu""" ) self.assertIsInstance(__lowerCamelCase , nn.GELU ) self.assertEqual(act(torch.tensor(-100 , dtype=torch.floataa ) ).item() , 0 ) self.assertNotEqual(act(torch.tensor(-1 , dtype=torch.floataa ) ).item() , 0 ) self.assertEqual(act(torch.tensor(0 , dtype=torch.floataa ) ).item() , 0 ) self.assertEqual(act(torch.tensor(20 , dtype=torch.floataa ) ).item() , 20 )
717
from ..utils import DummyObject, requires_backends class _a ( metaclass=snake_case_ ): _UpperCamelCase: List[Any] = ["keras_nlp"] def __init__( self , *lowercase_ , **lowercase_ ) -> Tuple: requires_backends(self , ["""keras_nlp"""] )
693
0
import random
from typing import Any


def fisher_yates_shuffle(data: list) -> list[Any]:
    """Shuffle ``data`` in place by random index swaps and return it.

    Fixes over the previous version: the function was defined under a
    mangled name while the caller used ``fisher_yates_shuffle``; the body
    referenced the undefined name ``a_``; and the swap line assigned the
    tuple ``(data[b], data[a])`` to a throwaway local, so the list was
    never actually modified.

    :param data: list to shuffle (mutated in place).
    :return: the same list object, shuffled.
    """
    for _ in range(len(data)):
        # Pick two random positions and exchange their elements.
        a = random.randint(0, len(data) - 1)
        b = random.randint(0, len(data) - 1)
        data[a], data[b] = data[b], data[a]
    return data


if __name__ == "__main__":
    integers = [0, 1, 2, 3, 4, 5, 6, 7]
    strings = ["python", "says", "hello", "!"]
    print('Fisher-Yates Shuffle:')
    print('List', integers, strings)
    print('FY Shuffle', fisher_yates_shuffle(integers), fisher_yates_shuffle(strings))
718
# Conditional exports for the UnCLIP pipelines: the real implementations are
# only importable when both torch and transformers (>= 4.25.0) are present;
# otherwise dummy stand-ins that raise an informative ImportError are exported
# under the same names so `from ... import UnCLIPPipeline` never fails at
# import time.
from ...utils import (
    OptionalDependencyNotAvailable,
    is_torch_available,
    is_transformers_available,
    is_transformers_version,
)

try:
    # Raise (and immediately catch) when any required backend is missing.
    if not (is_transformers_available() and is_torch_available() and is_transformers_version('>=', '4.25.0')):
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    # Placeholder classes that defer the ImportError until instantiation.
    from ...utils.dummy_torch_and_transformers_objects import UnCLIPImageVariationPipeline, UnCLIPPipeline
else:
    # All backends available: export the real pipelines and the text-projection model.
    from .pipeline_unclip import UnCLIPPipeline
    from .pipeline_unclip_image_variation import UnCLIPImageVariationPipeline
    from .text_proj import UnCLIPTextProjModel
693
0
import datasets import faiss import numpy as np import streamlit as st import torch from elasticsearch import Elasticsearch from elia_utils import ( embed_questions_for_retrieval, make_qa_sas_model, qa_sas_generate, query_es_index, query_qa_dense_index, ) import transformers from transformers import AutoModel, AutoModelForSeqaSeqLM, AutoTokenizer lowerCAmelCase : int ='''bart''' lowerCAmelCase : str =True @st.cache(allow_output_mutation=_A ) def _UpperCAmelCase ( ): '''simple docstring''' if LOAD_DENSE_INDEX: lowerCAmelCase : int = AutoTokenizer.from_pretrained("""yjernite/retribert-base-uncased""" ) lowerCAmelCase : Optional[Any] = AutoModel.from_pretrained("""yjernite/retribert-base-uncased""" ).to("""cuda:0""" ) lowerCAmelCase : Optional[Any] = qar_model.eval() else: lowerCAmelCase , lowerCAmelCase : Union[str, Any] = (None, None) if MODEL_TYPE == "bart": lowerCAmelCase : Union[str, Any] = AutoTokenizer.from_pretrained("""yjernite/bart_eli5""" ) lowerCAmelCase : List[str] = AutoModelForSeqaSeqLM.from_pretrained("""yjernite/bart_eli5""" ).to("""cuda:0""" ) lowerCAmelCase : int = torch.load("""seq2seq_models/eli5_bart_model_blm_2.pth""" ) sas_model.load_state_dict(save_dict["""model"""] ) lowerCAmelCase : int = sas_model.eval() else: lowerCAmelCase , lowerCAmelCase : Dict = make_qa_sas_model( model_name="""t5-small""" ,from_file="""seq2seq_models/eli5_t5_model_1024_4.pth""" ,device="""cuda:0""" ) return (qar_tokenizer, qar_model, sas_tokenizer, sas_model) @st.cache(allow_output_mutation=_A ) def _UpperCAmelCase ( ): '''simple docstring''' if LOAD_DENSE_INDEX: lowerCAmelCase : Tuple = faiss.StandardGpuResources() lowerCAmelCase : int = datasets.load_dataset(path="""wiki_snippets""" ,name="""wiki40b_en_100_0""" )["""train"""] lowerCAmelCase : Union[str, Any] = np.memmap( """wiki40b_passages_reps_32_l-8_h-768_b-512-512.dat""" ,dtype="""float32""" ,mode="""r""" ,shape=(wikiaab_passages.num_rows, 1_2_8) ,) lowerCAmelCase : List[Any] = faiss.IndexFlatIP(1_2_8 ) 
lowerCAmelCase : Optional[int] = faiss.index_cpu_to_gpu(_A ,1 ,_A ) wikiaab_gpu_index_flat.add(_A ) # TODO fix for larger GPU else: lowerCAmelCase , lowerCAmelCase : Any = (None, None) lowerCAmelCase : Optional[int] = Elasticsearch([{"""host""": """localhost""", """port""": """9200"""}] ) return (wikiaab_passages, wikiaab_gpu_index_flat, es_client) @st.cache(allow_output_mutation=_A ) def _UpperCAmelCase ( ): '''simple docstring''' lowerCAmelCase : Any = datasets.load_dataset("""eli5""" ,name="""LFQA_reddit""" ) lowerCAmelCase : Any = elia["""train_eli5"""] lowerCAmelCase : Dict = np.memmap( """eli5_questions_reps.dat""" ,dtype="""float32""" ,mode="""r""" ,shape=(elia_train.num_rows, 1_2_8) ) lowerCAmelCase : Optional[int] = faiss.IndexFlatIP(1_2_8 ) eli5_train_q_index.add(_A ) return (elia_train, eli5_train_q_index) lowerCAmelCase : str =load_indexes() lowerCAmelCase : Union[str, Any] =load_models() lowerCAmelCase : Tuple =load_train_data() def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=1_0 ): '''simple docstring''' lowerCAmelCase : Dict = embed_questions_for_retrieval([question] ,_A ,_A ) lowerCAmelCase , lowerCAmelCase : Tuple = eli5_train_q_index.search(_A ,_A ) lowerCAmelCase : Any = [elia_train[int(_A )] for i in I[0]] return nn_examples def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__="wiki40b" ,SCREAMING_SNAKE_CASE__="dense" ,SCREAMING_SNAKE_CASE__=1_0 ): '''simple docstring''' if source == "none": lowerCAmelCase , lowerCAmelCase : Tuple = (""" <P> """.join(["""""" for _ in range(1_1 )] ).strip(), []) else: if method == "dense": lowerCAmelCase , lowerCAmelCase : Optional[Any] = query_qa_dense_index( _A ,_A ,_A ,_A ,_A ,_A ) else: lowerCAmelCase , lowerCAmelCase : int = query_es_index( _A ,_A ,index_name="""english_wiki40b_snippets_100w""" ,n_results=_A ,) lowerCAmelCase : Union[str, Any] = [ (res["""article_title"""], res["""section_title"""].strip(), res["""score"""], res["""passage_text"""]) for res in hit_lst ] 
lowerCAmelCase : int = """question: {} context: {}""".format(_A ,_A ) return question_doc, support_list @st.cache( hash_funcs={ torch.Tensor: (lambda SCREAMING_SNAKE_CASE__ : None), transformers.models.bart.tokenization_bart.BartTokenizer: (lambda SCREAMING_SNAKE_CASE__ : None), } ) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=6_4 ,SCREAMING_SNAKE_CASE__=2_5_6 ,SCREAMING_SNAKE_CASE__=False ,SCREAMING_SNAKE_CASE__=2 ,SCREAMING_SNAKE_CASE__=0.95 ,SCREAMING_SNAKE_CASE__=0.8 ): '''simple docstring''' with torch.no_grad(): lowerCAmelCase : str = qa_sas_generate( _A ,_A ,_A ,num_answers=1 ,num_beams=_A ,min_len=_A ,max_len=_A ,do_sample=_A ,temp=_A ,top_p=_A ,top_k=_A ,max_input_length=1_0_2_4 ,device="""cuda:0""" ,)[0] return (answer, support_list) st.title('Long Form Question Answering with ELI5') # Start sidebar lowerCAmelCase : Optional[Any] ='''<img src=\'https://huggingface.co/front/assets/huggingface_logo.svg\'>''' lowerCAmelCase : Optional[int] =''' <html> <head> <style> .img-container { padding-left: 90px; padding-right: 90px; padding-top: 50px; padding-bottom: 50px; background-color: #f0f3f9; } </style> </head> <body> <span class="img-container"> <!-- Inline parent element --> %s </span> </body> </html> ''' % ( header_html, ) st.sidebar.markdown( header_full, unsafe_allow_html=True, ) # Long Form QA with ELI5 and Wikipedia lowerCAmelCase : Any =''' This demo presents a model trained to [provide long-form answers to open-domain questions](https://yjernite.github.io/lfqa.html). First, a document retriever fetches a set of relevant Wikipedia passages given the question from the [Wiki40b](https://research.google/pubs/pub49029/) dataset, a pre-processed fixed snapshot of Wikipedia. 
''' st.sidebar.markdown(description, unsafe_allow_html=True) lowerCAmelCase : Dict =[ '''Answer the question''', '''View the retrieved document only''', '''View the most similar ELI5 question and answer''', '''Show me everything, please!''', ] lowerCAmelCase : Optional[Any] =st.sidebar.checkbox('Demo options') if demo_options: lowerCAmelCase : List[str] =st.sidebar.selectbox( '', action_list, index=3, ) lowerCAmelCase : Any =action_list.index(action_st) lowerCAmelCase : List[Any] =st.sidebar.selectbox( '', ['Show full text of passages', 'Show passage section titles'], index=0, ) lowerCAmelCase : Optional[Any] =show_type == '''Show full text of passages''' else: lowerCAmelCase : Optional[int] =3 lowerCAmelCase : Optional[Any] =True lowerCAmelCase : Optional[Any] =st.sidebar.checkbox('Retrieval options') if retrieval_options: lowerCAmelCase : Union[str, Any] =''' ### Information retriever options The **sparse** retriever uses ElasticSearch, while the **dense** retriever uses max-inner-product search between a question and passage embedding trained using the [ELI5](https://arxiv.org/abs/1907.09190) questions-answer pairs. The answer is then generated by sequence to sequence model which takes the question and retrieved document as input. 
''' st.sidebar.markdown(retriever_info) lowerCAmelCase : Dict =st.sidebar.selectbox('Which Wikipedia format should the model use?', ['wiki40b', 'none']) lowerCAmelCase : List[Any] =st.sidebar.selectbox('Which Wikipedia indexer should the model use?', ['dense', 'sparse', 'mixed']) else: lowerCAmelCase : int ='''wiki40b''' lowerCAmelCase : Dict ='''dense''' lowerCAmelCase : Any ='''beam''' lowerCAmelCase : str =2 lowerCAmelCase : Tuple =64 lowerCAmelCase : Union[str, Any] =256 lowerCAmelCase : Optional[int] =None lowerCAmelCase : Tuple =None lowerCAmelCase : str =st.sidebar.checkbox('Generation options') if generate_options: lowerCAmelCase : str =''' ### Answer generation options The sequence-to-sequence model was initialized with [BART](https://huggingface.co/facebook/bart-large) weights and fine-tuned on the ELI5 QA pairs and retrieved documents. You can use the model for greedy decoding with **beam** search, or **sample** from the decoder\'s output probabilities. ''' st.sidebar.markdown(generate_info) lowerCAmelCase : Dict =st.sidebar.selectbox('Would you like to use beam search or sample an answer?', ['beam', 'sampled']) lowerCAmelCase : List[Any] =st.sidebar.slider( 'Minimum generation length', min_value=8, max_value=256, value=64, step=8, format=None, key=None ) lowerCAmelCase : Optional[int] =st.sidebar.slider( 'Maximum generation length', min_value=64, max_value=512, value=256, step=16, format=None, key=None ) if sampled == "beam": lowerCAmelCase : List[Any] =st.sidebar.slider('Beam size', min_value=1, max_value=8, value=2, step=None, format=None, key=None) else: lowerCAmelCase : int =st.sidebar.slider( 'Nucleus sampling p', min_value=0.1, max_value=1.0, value=0.9_5, step=0.0_1, format=None, key=None ) lowerCAmelCase : Optional[Any] =st.sidebar.slider( 'Temperature', min_value=0.1, max_value=1.0, value=0.7, step=0.0_1, format=None, key=None ) lowerCAmelCase : Optional[Any] =None # start main text lowerCAmelCase : Union[str, Any] =[ '''<MY QUESTION>''', '''How 
do people make chocolate?''', '''Why do we get a fever when we are sick?''', '''How can different animals perceive different colors?''', '''What is natural language processing?''', '''What\'s the best way to treat a sunburn?''', '''What exactly are vitamins ?''', '''How does nuclear energy provide electricity?''', '''What\'s the difference between viruses and bacteria?''', '''Why are flutes classified as woodwinds when most of them are made out of metal ?''', '''Why do people like drinking coffee even though it tastes so bad?''', '''What happens when wine ages? How does it make the wine taste better?''', '''If an animal is an herbivore, where does it get the protein that it needs to survive if it only eats grass?''', '''How can we set a date to the beginning or end of an artistic period? Doesn\'t the change happen gradually?''', '''How does New Zealand have so many large bird predators?''', ] lowerCAmelCase : Dict =st.selectbox( 'What would you like to ask? ---- select <MY QUESTION> to enter a new query', questions_list, index=1, ) if question_s == "<MY QUESTION>": lowerCAmelCase : Union[str, Any] =st.text_input('Enter your question here:', '') else: lowerCAmelCase : Tuple =question_s if st.button('Show me!'): if action in [0, 1, 3]: if index_type == "mixed": lowerCAmelCase : List[Any] =make_support(question, source=wiki_source, method='dense', n_results=10) lowerCAmelCase : Any =make_support(question, source=wiki_source, method='sparse', n_results=10) lowerCAmelCase : Any =[] for res_d, res_s in zip(support_list_dense, support_list_sparse): if tuple(res_d) not in support_list: support_list += [tuple(res_d)] if tuple(res_s) not in support_list: support_list += [tuple(res_s)] lowerCAmelCase : List[Any] =support_list[:10] lowerCAmelCase : List[Any] ='''<P> ''' + ''' <P> '''.join([res[-1] for res in support_list]) else: lowerCAmelCase : Any =make_support(question, source=wiki_source, method=index_type, n_results=10) if action in [0, 3]: lowerCAmelCase : Any 
=answer_question( question_doc, sas_model, sas_tokenizer, min_len=min_len, max_len=int(max_len), sampling=(sampled == 'sampled'), n_beams=n_beams, top_p=top_p, temp=temp, ) st.markdown('### The model generated answer is:') st.write(answer) if action in [0, 1, 3] and wiki_source != "none": st.markdown('--- \n ### The model is drawing information from the following Wikipedia passages:') for i, res in enumerate(support_list): lowerCAmelCase : str ='''https://en.wikipedia.org/wiki/{}'''.format(res[0].replace(' ', '_')) lowerCAmelCase : Tuple =res[1].strip() if sec_titles == "": lowerCAmelCase : List[Any] ='''[{}]({})'''.format(res[0], wiki_url) else: lowerCAmelCase : Union[str, Any] =sec_titles.split(' & ') lowerCAmelCase : Optional[Any] =''' & '''.join( ['[{}]({}#{})'.format(sec.strip(), wiki_url, sec.strip().replace(' ', '_')) for sec in sec_list] ) st.markdown( '{0:02d} - **Article**: {1:<18} <br> _Section_: {2}'.format(i + 1, res[0], sections), unsafe_allow_html=True, ) if show_passages: st.write( '> <span style="font-family:arial; font-size:10pt;">' + res[-1] + '</span>', unsafe_allow_html=True ) if action in [2, 3]: lowerCAmelCase : int =find_nearest_training(question) lowerCAmelCase : int =nn_train_list[0] st.markdown( '--- \n ### The most similar question in the ELI5 training set was: \n\n {}'.format(train_exple['title']) ) lowerCAmelCase : Union[str, Any] =[ '''{}. {}'''.format(i + 1, ' \n'.join([line.strip() for line in ans.split('\n') if line.strip() != ''])) for i, (ans, sc) in enumerate(zip(train_exple['answers']['text'], train_exple['answers']['score'])) if i == 0 or sc > 2 ] st.markdown('##### Its answers were: \n\n {}'.format('\n'.join(answers_st))) lowerCAmelCase : Any =''' --- **Disclaimer** *The intent of this app is to provide some (hopefully entertaining) insights into the behavior of a current LFQA system. Evaluating biases of such a model and ensuring factual generations are still very much open research problems. 
Therefore, until some significant progress is achieved, we caution against using the generated answers for practical purposes.* ''' st.sidebar.markdown(disclaimer, unsafe_allow_html=True)
719
def lucas_lehmer_test(p: int) -> bool:
    """Lucas-Lehmer primality test for the Mersenne number 2**p - 1.

    The result is only meaningful when ``p`` itself is prime: starting from
    s = 4, iterate s <- (s*s - 2) mod (2**p - 1) exactly p - 2 times; the
    Mersenne number is prime iff the final s is 0.

    Fix over the previous version: the function was defined under a mangled
    name while the `__main__` block called ``lucas_lehmer_test``, which
    raised a NameError at runtime.

    :param p: exponent of the Mersenne candidate, must be >= 2.
    :return: True iff 2**p - 1 is prime.
    :raises ValueError: if ``p`` is less than 2.
    """
    if p < 2:
        raise ValueError("p should not be less than 2!")
    if p == 2:
        return True  # 2**2 - 1 == 3 is prime; the iteration below needs p > 2.
    s = 4
    m = (1 << p) - 1
    for _ in range(p - 2):
        s = ((s * s) - 2) % m
    return s == 0


if __name__ == "__main__":
    print(lucas_lehmer_test(7))
    print(lucas_lehmer_test(11))
693
0
from typing import Dict, List, Optional, Tuple, Union import numpy as np from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict from ...image_transforms import ( center_crop, get_resize_output_image_size, normalize, rescale, resize, to_channel_dimension_format, ) from ...image_utils import ( IMAGENET_STANDARD_MEAN, IMAGENET_STANDARD_STD, ChannelDimension, ImageInput, PILImageResampling, make_list_of_images, to_numpy_array, valid_images, ) from ...utils import TensorType, is_torch_available, is_torch_tensor, logging if is_torch_available(): import torch lowerCAmelCase : Dict =logging.get_logger(__name__) class _a ( _a ): _UpperCamelCase: List[str] = ["""pixel_values"""] def __init__( self , lowercase_ = True , lowercase_ = None , lowercase_ = PILImageResampling.BILINEAR , lowercase_ = True , lowercase_ = None , lowercase_ = True , lowercase_ = 1 / 255 , lowercase_ = True , lowercase_ = None , lowercase_ = None , **lowercase_ , ) -> None: super().__init__(**snake_case_ ) lowerCAmelCase : List[str] = size if size is not None else {"""shortest_edge""": 256} lowerCAmelCase : Dict = get_size_dict(snake_case_ , default_to_square=snake_case_ ) lowerCAmelCase : List[Any] = crop_size if crop_size is not None else {"""height""": 224, """width""": 224} lowerCAmelCase : Optional[int] = get_size_dict(snake_case_ , param_name="""crop_size""" ) lowerCAmelCase : Any = do_resize lowerCAmelCase : Optional[Any] = size lowerCAmelCase : Optional[Any] = resample lowerCAmelCase : int = do_center_crop lowerCAmelCase : Union[str, Any] = crop_size lowerCAmelCase : Dict = do_rescale lowerCAmelCase : Any = rescale_factor lowerCAmelCase : Tuple = do_normalize lowerCAmelCase : List[str] = image_mean if image_mean is not None else IMAGENET_STANDARD_MEAN lowerCAmelCase : Tuple = image_std if image_std is not None else IMAGENET_STANDARD_STD def _snake_case ( self , lowercase_ , lowercase_ , lowercase_ = PILImageResampling.BICUBIC , lowercase_ = None , **lowercase_ , ) 
-> np.ndarray: lowerCAmelCase : int = get_size_dict(snake_case_ , default_to_square=snake_case_ ) if "shortest_edge" not in size: raise ValueError(f"""The `size` parameter must contain the key `shortest_edge`. Got {size.keys()}""" ) lowerCAmelCase : List[str] = get_resize_output_image_size(snake_case_ , size=size["""shortest_edge"""] , default_to_square=snake_case_ ) return resize(snake_case_ , size=snake_case_ , resample=snake_case_ , data_format=snake_case_ , **snake_case_ ) def _snake_case ( self , lowercase_ , lowercase_ , lowercase_ = None , **lowercase_ , ) -> np.ndarray: lowerCAmelCase : Union[str, Any] = get_size_dict(snake_case_ ) if "height" not in size or "width" not in size: raise ValueError(f"""The `size` parameter must contain the keys `height` and `width`. Got {size.keys()}""" ) return center_crop(snake_case_ , size=(size["""height"""], size["""width"""]) , data_format=snake_case_ , **snake_case_ ) def _snake_case ( self , lowercase_ , lowercase_ , lowercase_ = None , **lowercase_ ) -> np.ndarray: return rescale(snake_case_ , scale=snake_case_ , data_format=snake_case_ , **snake_case_ ) def _snake_case ( self , lowercase_ , lowercase_ , lowercase_ , lowercase_ = None , **lowercase_ , ) -> np.ndarray: return normalize(snake_case_ , mean=snake_case_ , std=snake_case_ , data_format=snake_case_ , **snake_case_ ) def _snake_case ( self , lowercase_ , lowercase_ = None , lowercase_ = None , lowercase_ = None , lowercase_ = None , lowercase_ = None , lowercase_ = None , lowercase_ = None , lowercase_ = None , lowercase_ = None , lowercase_ = None , lowercase_ = None , lowercase_ = ChannelDimension.FIRST , **lowercase_ , ) -> Tuple: lowerCAmelCase : List[Any] = do_resize if do_resize is not None else self.do_resize lowerCAmelCase : str = size if size is not None else self.size lowerCAmelCase : Optional[int] = get_size_dict(snake_case_ , default_to_square=snake_case_ ) lowerCAmelCase : Union[str, Any] = resample if resample is not None else self.resample 
lowerCAmelCase : Tuple = do_center_crop if do_center_crop is not None else self.do_center_crop lowerCAmelCase : Dict = crop_size if crop_size is not None else self.crop_size lowerCAmelCase : Optional[Any] = get_size_dict(snake_case_ , param_name="""crop_size""" ) lowerCAmelCase : List[Any] = do_rescale if do_rescale is not None else self.do_rescale lowerCAmelCase : str = rescale_factor if rescale_factor is not None else self.rescale_factor lowerCAmelCase : Tuple = do_normalize if do_normalize is not None else self.do_normalize lowerCAmelCase : int = image_mean if image_mean is not None else self.image_mean lowerCAmelCase : List[str] = image_std if image_std is not None else self.image_std lowerCAmelCase : Union[str, Any] = make_list_of_images(snake_case_ ) if not valid_images(snake_case_ ): raise ValueError( """Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, """ """torch.Tensor, tf.Tensor or jax.ndarray.""" ) if do_resize and size is None: raise ValueError("""Size must be specified if do_resize is True.""" ) if do_center_crop and crop_size is None: raise ValueError("""Crop size must be specified if do_center_crop is True.""" ) if do_rescale and rescale_factor is None: raise ValueError("""Rescale factor must be specified if do_rescale is True.""" ) if do_normalize and (image_mean is None or image_std is None): raise ValueError("""Image mean and std must be specified if do_normalize is True.""" ) # All transformations expect numpy arrays. 
lowerCAmelCase : Optional[int] = [to_numpy_array(snake_case_ ) for image in images] if do_resize: lowerCAmelCase : Optional[Any] = [self.resize(image=snake_case_ , size=snake_case_ , resample=snake_case_ ) for image in images] if do_center_crop: lowerCAmelCase : List[str] = [self.center_crop(image=snake_case_ , size=snake_case_ ) for image in images] if do_rescale: lowerCAmelCase : Union[str, Any] = [self.rescale(image=snake_case_ , scale=snake_case_ ) for image in images] if do_normalize: lowerCAmelCase : int = [self.normalize(image=snake_case_ , mean=snake_case_ , std=snake_case_ ) for image in images] lowerCAmelCase : Union[str, Any] = [to_channel_dimension_format(snake_case_ , snake_case_ ) for image in images] lowerCAmelCase : Tuple = {"""pixel_values""": images} return BatchFeature(data=snake_case_ , tensor_type=snake_case_ ) def _snake_case ( self , lowercase_ , lowercase_ = None ) -> Union[str, Any]: lowerCAmelCase : List[Any] = outputs.logits # Resize logits and compute semantic segmentation maps if target_sizes is not None: if len(snake_case_ ) != len(snake_case_ ): raise ValueError( """Make sure that you pass in as many target sizes as the batch dimension of the logits""" ) if is_torch_tensor(snake_case_ ): lowerCAmelCase : Optional[int] = target_sizes.numpy() lowerCAmelCase : str = [] for idx in range(len(snake_case_ ) ): lowerCAmelCase : Union[str, Any] = torch.nn.functional.interpolate( logits[idx].unsqueeze(dim=0 ) , size=target_sizes[idx] , mode="""bilinear""" , align_corners=snake_case_ ) lowerCAmelCase : str = resized_logits[0].argmax(dim=0 ) semantic_segmentation.append(snake_case_ ) else: lowerCAmelCase : List[Any] = logits.argmax(dim=1 ) lowerCAmelCase : str = [semantic_segmentation[i] for i in range(semantic_segmentation.shape[0] )] return semantic_segmentation
720
import random import unittest import torch from diffusers import IFImgaImgSuperResolutionPipeline from diffusers.utils import floats_tensor from diffusers.utils.import_utils import is_xformers_available from diffusers.utils.testing_utils import skip_mps, torch_device from ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS from ..test_pipelines_common import PipelineTesterMixin from . import IFPipelineTesterMixin @skip_mps class _a ( snake_case_ , snake_case_ , unittest.TestCase ): _UpperCamelCase: Optional[Any] = IFImgaImgSuperResolutionPipeline _UpperCamelCase: int = TEXT_GUIDED_IMAGE_VARIATION_PARAMS - {"width", "height"} _UpperCamelCase: Optional[int] = TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS.union({"original_image"} ) _UpperCamelCase: List[str] = PipelineTesterMixin.required_optional_params - {"latents"} def _snake_case ( self ) -> int: return self._get_superresolution_dummy_components() def _snake_case ( self , lowercase_ , lowercase_=0 ) -> Optional[Any]: if str(lowercase_ ).startswith("""mps""" ): lowerCAmelCase : Any = torch.manual_seed(lowercase_ ) else: lowerCAmelCase : List[Any] = torch.Generator(device=lowercase_ ).manual_seed(lowercase_ ) lowerCAmelCase : Union[str, Any] = floats_tensor((1, 3, 32, 32) , rng=random.Random(lowercase_ ) ).to(lowercase_ ) lowerCAmelCase : Optional[Any] = floats_tensor((1, 3, 16, 16) , rng=random.Random(lowercase_ ) ).to(lowercase_ ) lowerCAmelCase : Optional[int] = { """prompt""": """A painting of a squirrel eating a burger""", """image""": image, """original_image""": original_image, """generator""": generator, """num_inference_steps""": 2, """output_type""": """numpy""", } return inputs @unittest.skipIf( torch_device != """cuda""" or not is_xformers_available() , reason="""XFormers attention is only available with CUDA and `xformers` installed""" , ) def _snake_case ( self ) -> Optional[int]: self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3 ) 
def _snake_case ( self ) -> int: self._test_save_load_optional_components() @unittest.skipIf(torch_device != """cuda""" , reason="""float16 requires CUDA""" ) def _snake_case ( self ) -> Any: # Due to non-determinism in save load of the hf-internal-testing/tiny-random-t5 text encoder super().test_save_load_floataa(expected_max_diff=1e-1 ) def _snake_case ( self ) -> int: self._test_attention_slicing_forward_pass(expected_max_diff=1e-2 ) def _snake_case ( self ) -> Any: self._test_save_load_local() def _snake_case ( self ) -> str: self._test_inference_batch_single_identical( expected_max_diff=1e-2 , )
693
0
from dataclasses import dataclass, field from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Union import pyarrow as pa if TYPE_CHECKING: from .features import FeatureType @dataclass class _a : _UpperCamelCase: str = 42 _UpperCamelCase: int = None # Automatically constructed _UpperCamelCase: Any = "dict" _UpperCamelCase: Optional[Any] = None _UpperCamelCase: Optional[Any] = field(default="Translation" , init=SCREAMING_SNAKE_CASE__ , repr=SCREAMING_SNAKE_CASE__ ) def __call__( self ) -> List[Any]: return pa.struct({lang: pa.string() for lang in sorted(self.languages )} ) def _snake_case ( self ) -> Union["FeatureType", Dict[str, "FeatureType"]]: from .features import Value return {k: Value("""string""" ) for k in sorted(self.languages )} @dataclass class _a : _UpperCamelCase: Tuple = None _UpperCamelCase: Tuple = None _UpperCamelCase: str = None # Automatically constructed _UpperCamelCase: Union[str, Any] = "dict" _UpperCamelCase: Union[str, Any] = None _UpperCamelCase: Optional[int] = field(default="TranslationVariableLanguages" , init=SCREAMING_SNAKE_CASE__ , repr=SCREAMING_SNAKE_CASE__ ) def _snake_case ( self ) -> Any: lowerCAmelCase : Tuple = sorted(set(self.languages ) ) if self.languages else None lowerCAmelCase : Optional[int] = len(self.languages ) if self.languages else None def __call__( self ) -> str: return pa.struct({"""language""": pa.list_(pa.string() ), """translation""": pa.list_(pa.string() )} ) def _snake_case ( self , lowercase_ ) -> Tuple: lowerCAmelCase : int = set(self.languages ) if self.languages and set(_lowercase ) - lang_set: raise ValueError( f"""Some languages in example ({', '.join(sorted(set(_lowercase ) - lang_set ) )}) are not in valid set ({', '.join(_lowercase )}).""" ) # Convert dictionary into tuples, splitting out cases where there are # multiple translations for a single language. 
lowerCAmelCase : List[Any] = [] for lang, text in translation_dict.items(): if isinstance(_lowercase , _lowercase ): translation_tuples.append((lang, text) ) else: translation_tuples.extend([(lang, el) for el in text] ) # Ensure translations are in ascending order by language code. lowerCAmelCase : str = zip(*sorted(_lowercase ) ) return {"language": languages, "translation": translations} def _snake_case ( self ) -> Union["FeatureType", Dict[str, "FeatureType"]]: from .features import Sequence, Value return { "language": Sequence(Value("""string""" ) ), "translation": Sequence(Value("""string""" ) ), }
721
from ...configuration_utils import PretrainedConfig
from ...utils import logging


# Module-level logger for this configuration file.
logger = logging.get_logger(__name__)

# No pretrained checkpoint -> config-URL entries are registered here.
LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP = {}


class _a(PretrainedConfig):
    """Configuration for LLaMA-style decoder-only models.

    Stores model hyper-parameters (sizes, layer counts, normalization
    epsilon, RoPE scaling, ...) and forwards the special-token ids to
    ``PretrainedConfig.__init__``.

    Fixes over the previous version: every keyword parameter was named
    ``lowercase_`` (duplicate parameter names are a SyntaxError), the base
    class was the undefined name ``snake_case_``, every attribute
    assignment targeted one mangled local instead of ``self``, and the
    validator was defined as ``_snake_case`` while being invoked as
    ``self._rope_scaling_validation()``.
    """

    model_type = "llama"
    # Past key/value caches are runtime state, not configuration.
    keys_to_ignore_at_inference = ["past_key_values"]

    def __init__(
        self,
        vocab_size=32000,
        hidden_size=4096,
        intermediate_size=11008,
        num_hidden_layers=32,
        num_attention_heads=32,
        num_key_value_heads=None,
        hidden_act="silu",
        max_position_embeddings=2048,
        initializer_range=0.02,
        rms_norm_eps=1e-6,
        use_cache=True,
        pad_token_id=0,
        bos_token_id=1,
        eos_token_id=2,
        pretraining_tp=1,
        tie_word_embeddings=False,
        rope_scaling=None,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads

        # For backward compatibility: plain multi-head attention unless
        # grouped-query attention is explicitly requested.
        if num_key_value_heads is None:
            num_key_value_heads = num_attention_heads
        self.num_key_value_heads = num_key_value_heads

        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.rms_norm_eps = rms_norm_eps
        self.pretraining_tp = pretraining_tp
        self.use_cache = use_cache
        self.rope_scaling = rope_scaling
        # Validate before the base-class init, matching the original order.
        self._rope_scaling_validation()

        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )

    def _rope_scaling_validation(self):
        """Validate ``rope_scaling``: None, or a dict of exactly
        ``{"type": "linear"|"dynamic", "factor": float > 1.0}``.

        :raises ValueError: on any malformed ``rope_scaling`` value.
        """
        if self.rope_scaling is None:
            return

        if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
            raise ValueError(
                "`rope_scaling` must be a dictionary with with two fields, `name` and `factor`, "
                f"got {self.rope_scaling}"
            )
        rope_scaling_type = self.rope_scaling.get("type", None)
        rope_scaling_factor = self.rope_scaling.get("factor", None)
        if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic"]:
            raise ValueError(
                f"`rope_scaling`'s name field must be one of ['linear', 'dynamic'], got {rope_scaling_type}"
            )
        if rope_scaling_factor is None or not isinstance(rope_scaling_factor, float) or rope_scaling_factor <= 1.0:
            raise ValueError(f"`rope_scaling`'s factor field must be an float > 1, got {rope_scaling_factor}")
693
0
'''simple docstring''' from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase : Dict =logging.get_logger(__name__) lowerCAmelCase : List[Any] ={ 'tanreinama/GPTSAN-2.8B-spout_is_uniform': ( 'https://huggingface.co/tanreinama/GPTSAN-2.8B-spout_is_uniform/resolve/main/config.json' ), } class _a ( _lowercase ): _UpperCamelCase: str = '''gptsan-japanese''' _UpperCamelCase: Dict = [ '''past_key_values''', ] _UpperCamelCase: Dict = { '''hidden_size''': '''d_model''', '''num_attention_heads''': '''num_heads''', '''num_hidden_layers''': '''num_layers''', } def __init__( self , lowercase_=36000 , lowercase_=1280 , lowercase_=1024 , lowercase_=8192 , lowercase_=4096 , lowercase_=128 , lowercase_=10 , lowercase_=0 , lowercase_=16 , lowercase_=16 , lowercase_=128 , lowercase_=0.0 , lowercase_=1e-5 , lowercase_=False , lowercase_=0.0 , lowercase_="float32" , lowercase_=False , lowercase_=False , lowercase_=False , lowercase_=0.0_0_2 , lowercase_=False , lowercase_=True , lowercase_=35998 , lowercase_=35995 , lowercase_=35999 , **lowercase_ , ) -> Optional[int]: lowerCAmelCase : Tuple = vocab_size lowerCAmelCase : int = max_position_embeddings lowerCAmelCase : Dict = d_model lowerCAmelCase : List[Any] = d_ff lowerCAmelCase : List[str] = d_ext lowerCAmelCase : Optional[int] = d_spout lowerCAmelCase : Any = num_switch_layers lowerCAmelCase : int = num_ext_layers lowerCAmelCase : str = num_switch_layers + num_ext_layers lowerCAmelCase : List[str] = num_heads lowerCAmelCase : int = num_experts lowerCAmelCase : Optional[int] = expert_capacity lowerCAmelCase : Dict = dropout_rate lowerCAmelCase : str = layer_norm_epsilon lowerCAmelCase : Optional[int] = router_bias lowerCAmelCase : str = router_jitter_noise lowerCAmelCase : Optional[int] = router_dtype lowerCAmelCase : List[str] = router_ignore_padding_tokens lowerCAmelCase : Tuple = output_hidden_states lowerCAmelCase : Tuple = output_attentions lowerCAmelCase : Optional[Any] = 
initializer_factor lowerCAmelCase : Union[str, Any] = output_router_logits lowerCAmelCase : Dict = use_cache super().__init__( separator_token_id=A_ , pad_token_id=A_ , eos_token_id=A_ , **A_ , )
700
from collections import OrderedDict
from typing import Mapping

from packaging import version

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices


logger = logging.get_logger(__name__)

SWIN_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "microsoft/swin-tiny-patch4-window7-224": (
        "https://huggingface.co/microsoft/swin-tiny-patch4-window7-224/resolve/main/config.json"
    ),
    # See all Swin models at https://huggingface.co/models?filter=swin
}


class _a(BackboneConfigMixin, PretrainedConfig):
    """Configuration for a Swin Transformer model.

    NOTE(review): the mangled original used duplicated `lowercase_` parameter
    names and undefined `snake_case_` bases; names were reconstructed from the
    attributes the body assigns and the imported base classes.
    """

    model_type = "swin"

    attribute_map = {
        "num_attention_heads": "num_heads",
        "num_hidden_layers": "num_layers",
    }

    def __init__(
        self,
        image_size=224,
        patch_size=4,
        num_channels=3,
        embed_dim=96,
        depths=[2, 2, 6, 2],
        num_heads=[3, 6, 12, 24],
        window_size=7,
        mlp_ratio=4.0,
        qkv_bias=True,
        hidden_dropout_prob=0.0,
        attention_probs_dropout_prob=0.0,
        drop_path_rate=0.1,
        hidden_act="gelu",
        use_absolute_embeddings=False,
        initializer_range=0.02,
        layer_norm_eps=1e-5,
        encoder_stride=32,
        out_features=None,
        out_indices=None,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.embed_dim = embed_dim
        self.depths = depths
        self.num_layers = len(depths)
        self.num_heads = num_heads
        self.window_size = window_size
        self.mlp_ratio = mlp_ratio
        self.qkv_bias = qkv_bias
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.drop_path_rate = drop_path_rate
        self.hidden_act = hidden_act
        self.use_absolute_embeddings = use_absolute_embeddings
        self.layer_norm_eps = layer_norm_eps
        self.initializer_range = initializer_range
        self.encoder_stride = encoder_stride
        # we set the hidden_size attribute in order to make Swin work with VisionEncoderDecoderModel
        # this indicates the channel dimension after the last stage of the model
        self.hidden_size = int(embed_dim * 2 ** (len(depths) - 1))
        self.stage_names = ["stem"] + [f"stage{idx}" for idx in range(1, len(depths) + 1)]
        self._out_features, self._out_indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=out_indices, stage_names=self.stage_names
        )


# NOTE(review): the mangled source names both classes `_a`; the duplicate name
# is preserved so module-level bindings match the original (the second class
# shadows the first, as before).
class _a(OnnxConfig):
    """ONNX export configuration for Swin."""

    torch_onnx_minimum_version = version.parse("1.11")

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        # Dynamic axes for the single pixel-values input.
        return OrderedDict(
            [
                ("pixel_values", {0: "batch", 1: "num_channels", 2: "height", 3: "width"}),
            ]
        )

    @property
    def atol_for_validation(self) -> float:
        return 1e-4
693
0
import re
from pathlib import Path
from unittest import TestCase

import pytest


@pytest.mark.integration
class _a(TestCase):
    """Static code-quality checks over the dataset scripts in ./datasets.

    NOTE(review): the mangled original defined every method as `_snake_case`
    (later defs clobbering earlier ones) while the test methods called
    `self._no_encoding_on_file_open` / `self._no_print_statements`; method
    names were restored from those call sites.
    """

    def _no_encoding_on_file_open(self, file_path):
        r"""Return the first `open(...)` call in `file_path` that does not pass
        an explicit mode/encoding keyword, or None if every call is clean."""
        with open(file_path, encoding="utf-8") as input_file:
            regexp = re.compile(r"(?!.*\b(?:encoding|rb|w|wb|w+|wb+|ab|ab+)\b)(?<=\s)(open)\((.*)\)")
            input_data = input_file.read()
            match = regexp.search(input_data)
        return match

    def _no_print_statements(self, file_path):
        r"""Return the first real `print(` call in `file_path` (ignoring prints
        inside comments, string literals and docstrings), or None."""
        with open(file_path, encoding="utf-8") as input_file:
            regexp = re.compile(
                r"#[^\r\n]*print\(|\"[^\r\n]*print\(|\"\"\".*?print\(.*?\"\"\"|(print\()", re.DOTALL
            )
            input_data = input_file.read()
            # use `re.finditer` to handle the case where the ignored groups would be matched first by `re.search`
            matches = regexp.finditer(input_data)
        matches = [match for match in matches if match is not None and match.group(1) is not None]
        return matches[0] if matches else None

    def test_no_encoding_on_file_open(self):
        dataset_paths = Path("./datasets")
        dataset_files = list(dataset_paths.absolute().glob("**/*.py"))
        for dataset in dataset_files:
            if self._no_encoding_on_file_open(str(dataset)):
                raise AssertionError(f"open(...) must use utf-8 encoding in {dataset}")

    def test_no_print_statements(self):
        dataset_paths = Path("./datasets")
        dataset_files = list(dataset_paths.absolute().glob("**/*.py"))
        for dataset in dataset_files:
            if self._no_print_statements(str(dataset)):
                raise AssertionError(f"print statement found in {dataset}. Use datasets.logger/logging instead.")
701
# Pinned dependency table: package name -> pip version specifier.
# Auto-consumed by the packaging machinery; keep pins in sync with setup.py.
lowerCAmelCase : dict = {
    'Pillow': 'Pillow<10.0.0',
    'accelerate': 'accelerate>=0.20.3',
    'av': 'av==9.2.0',
    'beautifulsoup4': 'beautifulsoup4',
    'black': 'black~=23.1',
    'codecarbon': 'codecarbon==1.2.0',
    'cookiecutter': 'cookiecutter==1.7.3',
    'dataclasses': 'dataclasses',
    'datasets': 'datasets!=2.5.0',
    'decord': 'decord==0.6.0',
    'deepspeed': 'deepspeed>=0.9.3',
    'diffusers': 'diffusers',
    'dill': 'dill<0.3.5',
    'evaluate': 'evaluate>=0.2.0',
    'fairscale': 'fairscale>0.3',
    'faiss-cpu': 'faiss-cpu',
    'fastapi': 'fastapi',
    'filelock': 'filelock',
    'flax': 'flax>=0.4.1,<=0.7.0',
    'ftfy': 'ftfy',
    'fugashi': 'fugashi>=1.0',
    'GitPython': 'GitPython<3.1.19',
    'hf-doc-builder': 'hf-doc-builder>=0.3.0',
    'huggingface-hub': 'huggingface-hub>=0.14.1,<1.0',
    'importlib_metadata': 'importlib_metadata',
    'ipadic': 'ipadic>=1.0.0,<2.0',
    'isort': 'isort>=5.5.4',
    'jax': 'jax>=0.2.8,!=0.3.2,<=0.4.13',
    'jaxlib': 'jaxlib>=0.1.65,<=0.4.13',
    'jieba': 'jieba',
    'kenlm': 'kenlm',
    'keras-nlp': 'keras-nlp>=0.3.1',
    'librosa': 'librosa',
    'nltk': 'nltk',
    'natten': 'natten>=0.14.6',
    'numpy': 'numpy>=1.17',
    'onnxconverter-common': 'onnxconverter-common',
    'onnxruntime-tools': 'onnxruntime-tools>=1.4.2',
    'onnxruntime': 'onnxruntime>=1.4.0',
    'opencv-python': 'opencv-python',
    'optuna': 'optuna',
    'optax': 'optax>=0.0.8,<=0.1.4',
    'packaging': 'packaging>=20.0',
    'parameterized': 'parameterized',
    'phonemizer': 'phonemizer',
    'protobuf': 'protobuf',
    'psutil': 'psutil',
    'pyyaml': 'pyyaml>=5.1',
    'pydantic': 'pydantic<2',
    'pytest': 'pytest>=7.2.0',
    'pytest-timeout': 'pytest-timeout',
    'pytest-xdist': 'pytest-xdist',
    'python': 'python>=3.8.0',
    'ray[tune]': 'ray[tune]',
    'regex': 'regex!=2019.12.17',
    'requests': 'requests',
    'rhoknp': 'rhoknp>=1.1.0,<1.3.1',
    'rjieba': 'rjieba',
    'rouge-score': 'rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1',
    'ruff': 'ruff>=0.0.241,<=0.0.259',
    'sacrebleu': 'sacrebleu>=1.4.12,<2.0.0',
    'sacremoses': 'sacremoses',
    'safetensors': 'safetensors>=0.3.1',
    'sagemaker': 'sagemaker>=2.31.0',
    'scikit-learn': 'scikit-learn',
    'sentencepiece': 'sentencepiece>=0.1.91,!=0.1.92',
    'sigopt': 'sigopt',
    'starlette': 'starlette',
    'sudachipy': 'sudachipy>=0.6.6',
    'sudachidict_core': 'sudachidict_core>=20220729',
    'tensorflow-cpu': 'tensorflow-cpu>=2.6,<2.14',
    'tensorflow': 'tensorflow>=2.6,<2.14',
    'tensorflow-text': 'tensorflow-text<2.14',
    'tf2onnx': 'tf2onnx',
    'timeout-decorator': 'timeout-decorator',
    'timm': 'timm',
    'tokenizers': 'tokenizers>=0.11.1,!=0.11.3,<0.14',
    'torch': 'torch>=1.9,!=1.12.0',
    'torchaudio': 'torchaudio',
    'torchvision': 'torchvision',
    'pyctcdecode': 'pyctcdecode>=0.4.0',
    'tqdm': 'tqdm>=4.27',
    'unidic': 'unidic>=1.0.2',
    'unidic_lite': 'unidic_lite>=1.0.7',
    'urllib3': 'urllib3<2.0.0',
    'uvicorn': 'uvicorn',
}
693
0
"""Project Euler problem 43: sub-string divisibility of 0-9 pandigital numbers."""

from itertools import permutations


def is_substring_divisible(num):
    """Return True if the digit tuple `num` (a 0..9 pandigital permutation)
    satisfies the Euler 43 sub-string divisibility property:
    d2d3d4 % 2 == 0, d3d4d5 % 3 == 0, d4d5d6 % 5 == 0, and the three-digit
    windows starting at d5..d8 are divisible by 7, 11, 13, 17 respectively.

    NOTE(review): the original named both functions `_UpperCAmelCase` while
    calling `is_substring_divisible` / `solution`; names restored from the
    call sites.
    """
    # d2d3d4 divisible by 2 <=> its last digit d4 is even.
    if num[3] % 2 != 0:
        return False
    # d3d4d5 divisible by 3 <=> digit sum divisible by 3.
    if (num[2] + num[3] + num[4]) % 3 != 0:
        return False
    # d4d5d6 divisible by 5 <=> last digit is 0 or 5.
    if num[5] % 5 != 0:
        return False
    for i, test in enumerate([7, 11, 13, 17]):
        if (num[i + 4] * 100 + num[i + 5] * 10 + num[i + 6]) % test != 0:
            return False
    return True


def solution(n=10):
    """Return the sum of all 0..(n-1) pandigital numbers with the sub-string
    divisibility property (n=10 gives the Project Euler answer)."""
    return sum(
        int("".join(map(str, num)))
        for num in permutations(range(n))
        if is_substring_divisible(num)
    )


if __name__ == "__main__":
    print(f"{solution() = }")
702
# RoFormer lazy-import module entry point.
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_flax_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


# Base import structure; optional backends extend it below.
_import_structure = {
    "configuration_roformer": ["ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "RoFormerConfig", "RoFormerOnnxConfig"],
    "tokenization_roformer": ["RoFormerTokenizer"],
}

try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_roformer_fast"] = ["RoFormerTokenizerFast"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_roformer"] = [
        "ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "RoFormerForCausalLM",
        "RoFormerForMaskedLM",
        "RoFormerForMultipleChoice",
        "RoFormerForQuestionAnswering",
        "RoFormerForSequenceClassification",
        "RoFormerForTokenClassification",
        "RoFormerLayer",
        "RoFormerModel",
        "RoFormerPreTrainedModel",
        "load_tf_weights_in_roformer",
    ]

try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_tf_roformer"] = [
        "TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFRoFormerForCausalLM",
        "TFRoFormerForMaskedLM",
        "TFRoFormerForMultipleChoice",
        "TFRoFormerForQuestionAnswering",
        "TFRoFormerForSequenceClassification",
        "TFRoFormerForTokenClassification",
        "TFRoFormerLayer",
        "TFRoFormerModel",
        "TFRoFormerPreTrainedModel",
    ]

try:
    if not is_flax_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_flax_roformer"] = [
        "FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "FlaxRoFormerForMaskedLM",
        "FlaxRoFormerForMultipleChoice",
        "FlaxRoFormerForQuestionAnswering",
        "FlaxRoFormerForSequenceClassification",
        "FlaxRoFormerForTokenClassification",
        "FlaxRoFormerModel",
        "FlaxRoFormerPreTrainedModel",
    ]

if TYPE_CHECKING:
    from .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, RoFormerConfig, RoFormerOnnxConfig
    from .tokenization_roformer import RoFormerTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_roformer_fast import RoFormerTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_roformer import (
            ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            RoFormerForCausalLM,
            RoFormerForMaskedLM,
            RoFormerForMultipleChoice,
            RoFormerForQuestionAnswering,
            RoFormerForSequenceClassification,
            RoFormerForTokenClassification,
            RoFormerLayer,
            RoFormerModel,
            RoFormerPreTrainedModel,
            load_tf_weights_in_roformer,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_roformer import (
            TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFRoFormerForCausalLM,
            TFRoFormerForMaskedLM,
            TFRoFormerForMultipleChoice,
            TFRoFormerForQuestionAnswering,
            TFRoFormerForSequenceClassification,
            TFRoFormerForTokenClassification,
            TFRoFormerLayer,
            TFRoFormerModel,
            TFRoFormerPreTrainedModel,
        )

    try:
        if not is_flax_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_flax_roformer import (
            FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            FlaxRoFormerForMaskedLM,
            FlaxRoFormerForMultipleChoice,
            FlaxRoFormerForQuestionAnswering,
            FlaxRoFormerForSequenceClassification,
            FlaxRoFormerForTokenClassification,
            FlaxRoFormerModel,
            FlaxRoFormerPreTrainedModel,
        )

else:
    import sys

    # Install the lazy module so attribute access triggers the real imports.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
693
0
# Reformer lazy-import module entry point.
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_sentencepiece_available,
    is_tokenizers_available,
    is_torch_available,
)


# Base import structure; optional backends extend it below.
_import_structure = {"configuration_reformer": ["REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "ReformerConfig"]}

try:
    if not is_sentencepiece_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_reformer"] = ["ReformerTokenizer"]

try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_reformer_fast"] = ["ReformerTokenizerFast"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_reformer"] = [
        "REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "ReformerAttention",
        "ReformerForMaskedLM",
        "ReformerForQuestionAnswering",
        "ReformerForSequenceClassification",
        "ReformerLayer",
        "ReformerModel",
        "ReformerModelWithLMHead",
        "ReformerPreTrainedModel",
    ]

if TYPE_CHECKING:
    from .configuration_reformer import REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, ReformerConfig

    try:
        if not is_sentencepiece_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_reformer import ReformerTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_reformer_fast import ReformerTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_reformer import (
            REFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            ReformerAttention,
            ReformerForMaskedLM,
            ReformerForQuestionAnswering,
            ReformerForSequenceClassification,
            ReformerLayer,
            ReformerModel,
            ReformerModelWithLMHead,
            ReformerPreTrainedModel,
        )

else:
    import sys

    # Install the lazy module so attribute access triggers the real imports.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
703
def nor_gate(input_1, input_2):
    """Return the NOR of two binary inputs: 1 only when both inputs are 0.

    The original compared a variable with itself (`input_a == input_a == 0`)
    because both parameters carried the same mangled name; fixed to compare
    the two distinct inputs.
    """
    return int(input_1 == input_2 == 0)


def main():
    """Print the NOR-gate truth table."""
    print("Truth Table of NOR Gate:")
    print("| Input 1 | Input 2 | Output |")
    print(f"| 0 | 0 | {nor_gate(0, 0)} |")
    print(f"| 0 | 1 | {nor_gate(0, 1)} |")
    print(f"| 1 | 0 | {nor_gate(1, 0)} |")
    print(f"| 1 | 1 | {nor_gate(1, 1)} |")


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    main()
693
0
# TensorFlow text-classification fine-tuning script (TFTrainer-based).
#
# NOTE(review): reconstructed from a machine-mangled dump: every assignment
# targeted one throwaway name (`lowerCAmelCase`), helper/class names were
# erased, and several identifiers were corrupted (`tf.intaa`, `fpaa`,
# `labelaid`). Names below were restored from the surviving call sites;
# confirm against the upstream example script.
import logging
import os
from dataclasses import dataclass, field
from typing import Dict, Optional

import datasets
import numpy as np
import tensorflow as tf

from transformers import (
    AutoConfig,
    AutoTokenizer,
    EvalPrediction,
    HfArgumentParser,
    PreTrainedTokenizer,
    TFAutoModelForSequenceClassification,
    TFTrainer,
    TFTrainingArguments,
)
from transformers.utils import logging as hf_logging


hf_logging.set_verbosity_info()
hf_logging.enable_default_handler()
hf_logging.enable_explicit_format()


def get_tfds(
    train_file,
    eval_file,
    test_file,
    tokenizer,
    label_column_id,
    max_seq_length=None,
):
    """Load CSV files, tokenize them and return (train, val, test) tf.data
    datasets plus the label -> id mapping."""
    files = {}

    if train_file is not None:
        files[datasets.Split.TRAIN] = [train_file]
    if eval_file is not None:
        files[datasets.Split.VALIDATION] = [eval_file]
    if test_file is not None:
        files[datasets.Split.TEST] = [test_file]

    ds = datasets.load_dataset("csv", data_files=files)
    features_name = list(ds[list(files.keys())[0]].features.keys())
    # The label column is removed from the feature list; the rest is text.
    label_name = features_name.pop(label_column_id)
    label_list = list(set(ds[list(files.keys())[0]][label_name]))
    label2id = {label: i for i, label in enumerate(label_list)}
    input_names = tokenizer.model_input_names
    transformed_ds = {}

    if len(features_name) == 1:
        # Single-sentence classification.
        for k in files.keys():
            transformed_ds[k] = ds[k].map(
                lambda example: tokenizer.batch_encode_plus(
                    example[features_name[0]], truncation=True, max_length=max_seq_length, padding="max_length"
                ),
                batched=True,
            )
    elif len(features_name) == 2:
        # Sentence-pair classification.
        for k in files.keys():
            transformed_ds[k] = ds[k].map(
                lambda example: tokenizer.batch_encode_plus(
                    (example[features_name[0]], example[features_name[1]]),
                    truncation=True,
                    max_length=max_seq_length,
                    padding="max_length",
                ),
                batched=True,
            )

    def gen_train():
        for ex in transformed_ds[datasets.Split.TRAIN]:
            d = {k: v for k, v in ex.items() if k in input_names}
            label = label2id[ex[label_name]]
            yield (d, label)

    def gen_val():
        for ex in transformed_ds[datasets.Split.VALIDATION]:
            d = {k: v for k, v in ex.items() if k in input_names}
            label = label2id[ex[label_name]]
            yield (d, label)

    def gen_test():
        for ex in transformed_ds[datasets.Split.TEST]:
            d = {k: v for k, v in ex.items() if k in input_names}
            label = label2id[ex[label_name]]
            yield (d, label)

    # assumes int32 inputs / int64 labels like the upstream example — the
    # mangled source only shows `tf.intaa`; TODO confirm dtypes.
    train_ds = (
        tf.data.Dataset.from_generator(
            gen_train,
            ({k: tf.int32 for k in input_names}, tf.int64),
            ({k: tf.TensorShape([None]) for k in input_names}, tf.TensorShape([])),
        )
        if datasets.Split.TRAIN in transformed_ds
        else None
    )
    if train_ds is not None:
        train_ds = train_ds.apply(tf.data.experimental.assert_cardinality(len(ds[datasets.Split.TRAIN])))

    val_ds = (
        tf.data.Dataset.from_generator(
            gen_val,
            ({k: tf.int32 for k in input_names}, tf.int64),
            ({k: tf.TensorShape([None]) for k in input_names}, tf.TensorShape([])),
        )
        if datasets.Split.VALIDATION in transformed_ds
        else None
    )
    if val_ds is not None:
        val_ds = val_ds.apply(tf.data.experimental.assert_cardinality(len(ds[datasets.Split.VALIDATION])))

    test_ds = (
        tf.data.Dataset.from_generator(
            gen_test,
            ({k: tf.int32 for k in input_names}, tf.int64),
            ({k: tf.TensorShape([None]) for k in input_names}, tf.TensorShape([])),
        )
        if datasets.Split.TEST in transformed_ds
        else None
    )
    if test_ds is not None:
        test_ds = test_ds.apply(tf.data.experimental.assert_cardinality(len(ds[datasets.Split.TEST])))

    return train_ds, val_ds, test_ds, label2id


logger = logging.getLogger(__name__)


@dataclass
class DataTrainingArguments:
    """Arguments describing the input data files."""

    label_column_id: int = field(metadata={"help": "Which column contains the label"})
    train_file: Optional[str] = field(default=None, metadata={"help": "The path of the training file"})
    dev_file: Optional[str] = field(default=None, metadata={"help": "The path of the development file"})
    test_file: Optional[str] = field(default=None, metadata={"help": "The path of the test file"})
    max_seq_length: int = field(
        default=128,
        metadata={
            "help": (
                "The maximum total input sequence length after tokenization. Sequences longer "
                "than this will be truncated, sequences shorter will be padded."
            )
        },
    )
    overwrite_cache: bool = field(
        default=False, metadata={"help": "Overwrite the cached training and evaluation sets"}
    )


@dataclass
class ModelArguments:
    """Arguments pointing to the pretrained model/config/tokenizer."""

    model_name_or_path: str = field(
        metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
    )
    config_name: Optional[str] = field(
        default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
    )
    tokenizer_name: Optional[str] = field(
        default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
    )
    use_fast: bool = field(default=False, metadata={"help": "Set this flag to use fast tokenization."})
    # If you want to tweak more attributes on your tokenizer, you should do it in a distinct script,
    # or just modify its tokenizer_config.json.
    cache_dir: Optional[str] = field(
        default=None,
        metadata={"help": "Where do you want to store the pretrained models downloaded from huggingface.co"},
    )


def main():
    parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TFTrainingArguments))
    model_args, data_args, training_args = parser.parse_args_into_dataclasses()

    if (
        os.path.exists(training_args.output_dir)
        and os.listdir(training_args.output_dir)
        and training_args.do_train
        and not training_args.overwrite_output_dir
    ):
        raise ValueError(
            f"Output directory ({training_args.output_dir}) already exists and is not empty. Use"
            " --overwrite_output_dir to overcome."
        )

    # Setup logging
    logging.basicConfig(
        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
        datefmt="%m/%d/%Y %H:%M:%S",
        level=logging.INFO,
    )
    logger.info(
        f"n_replicas: {training_args.n_replicas}, distributed training: {bool(training_args.n_replicas > 1)}, "
        f"16-bits training: {training_args.fp16}"
    )
    logger.info(f"Training/evaluation parameters {training_args}")

    # Load pretrained model and tokenizer
    #
    # Distributed training:
    # The .from_pretrained methods guarantee that only one local process can concurrently
    # download model & vocab.
    tokenizer = AutoTokenizer.from_pretrained(
        model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path,
        cache_dir=model_args.cache_dir,
    )

    train_dataset, eval_dataset, test_ds, label2id = get_tfds(
        train_file=data_args.train_file,
        eval_file=data_args.dev_file,
        test_file=data_args.test_file,
        tokenizer=tokenizer,
        label_column_id=data_args.label_column_id,
        max_seq_length=data_args.max_seq_length,
    )

    config = AutoConfig.from_pretrained(
        model_args.config_name if model_args.config_name else model_args.model_name_or_path,
        num_labels=len(label2id),
        label2id=label2id,
        id2label={id: label for label, id in label2id.items()},
        finetuning_task="text-classification",
        cache_dir=model_args.cache_dir,
    )

    with training_args.strategy.scope():
        model = TFAutoModelForSequenceClassification.from_pretrained(
            model_args.model_name_or_path,
            from_pt=bool(".bin" in model_args.model_name_or_path),
            config=config,
            cache_dir=model_args.cache_dir,
        )

    def compute_metrics(p: EvalPrediction) -> Dict:
        preds = np.argmax(p.predictions, axis=1)
        return {"acc": (preds == p.label_ids).mean()}

    # Initialize our Trainer
    trainer = TFTrainer(
        model=model,
        args=training_args,
        train_dataset=train_dataset,
        eval_dataset=eval_dataset,
        compute_metrics=compute_metrics,
    )

    # Training
    if training_args.do_train:
        trainer.train()
        trainer.save_model()
        tokenizer.save_pretrained(training_args.output_dir)

    # Evaluation
    results = {}
    if training_args.do_eval:
        logger.info("*** Evaluate ***")
        result = trainer.evaluate()
        output_eval_file = os.path.join(training_args.output_dir, "eval_results.txt")
        with open(output_eval_file, "w") as writer:
            logger.info("***** Eval results *****")
            for key, value in result.items():
                logger.info(f" {key} = {value}")
                writer.write(f"{key} = {value}\n")
        results.update(result)

    return results


if __name__ == "__main__":
    main()
704
# PoolFormer lazy-import module entry point.
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available


# Base import structure; optional backends extend it below.
_import_structure = {
    "configuration_poolformer": [
        "POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "PoolFormerConfig",
        "PoolFormerOnnxConfig",
    ]
}

try:
    if not is_vision_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["feature_extraction_poolformer"] = ["PoolFormerFeatureExtractor"]
    _import_structure["image_processing_poolformer"] = ["PoolFormerImageProcessor"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_poolformer"] = [
        "POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "PoolFormerForImageClassification",
        "PoolFormerModel",
        "PoolFormerPreTrainedModel",
    ]

if TYPE_CHECKING:
    from .configuration_poolformer import (
        POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,
        PoolFormerConfig,
        PoolFormerOnnxConfig,
    )

    try:
        if not is_vision_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .feature_extraction_poolformer import PoolFormerFeatureExtractor
        from .image_processing_poolformer import PoolFormerImageProcessor

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_poolformer import (
            POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            PoolFormerForImageClassification,
            PoolFormerModel,
            PoolFormerPreTrainedModel,
        )

else:
    import sys

    # Install the lazy module so attribute access triggers the real imports.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)
693
0
import json
import os
import tempfile

from transformers.testing_utils import check_json_file_has_correct_format


class _a:
    """Save/load round-trip tests shared by feature-extractor test classes.

    Concrete subclasses are expected to provide `feature_extraction_class`
    and `feat_extract_dict` (and the unittest assertion methods).

    NOTE(review): the mangled original defined every method as `_snake_case`
    (clobbering) and used the undefined name `_a` for concrete values; method
    and variable names were reconstructed from the surrounding calls.
    """

    # Set by concrete subclasses to the feature-extractor class under test.
    feature_extraction_class = None

    def test_feat_extract_to_json_string(self):
        feat_extract = self.feature_extraction_class(**self.feat_extract_dict)
        obj = json.loads(feat_extract.to_json_string())
        for key, value in self.feat_extract_dict.items():
            self.assertEqual(obj[key], value)

    def test_feat_extract_to_json_file(self):
        feat_extract_first = self.feature_extraction_class(**self.feat_extract_dict)
        with tempfile.TemporaryDirectory() as tmpdirname:
            json_file_path = os.path.join(tmpdirname, "feat_extract.json")
            feat_extract_first.to_json_file(json_file_path)
            feat_extract_second = self.feature_extraction_class.from_json_file(json_file_path)
        self.assertEqual(feat_extract_second.to_dict(), feat_extract_first.to_dict())

    def test_feat_extract_from_and_save_pretrained(self):
        feat_extract_first = self.feature_extraction_class(**self.feat_extract_dict)
        with tempfile.TemporaryDirectory() as tmpdirname:
            saved_file = feat_extract_first.save_pretrained(tmpdirname)[0]
            check_json_file_has_correct_format(saved_file)
            feat_extract_second = self.feature_extraction_class.from_pretrained(tmpdirname)
        self.assertEqual(feat_extract_second.to_dict(), feat_extract_first.to_dict())

    def test_init_without_params(self):
        feat_extract = self.feature_extraction_class()
        self.assertIsNotNone(feat_extract)
705
# Raw keyboard input helpers (cross-platform getch / key decoding).
#
# NOTE(review): reconstructed from a mangled dump in which ARROW_KEY_FLAG,
# KEYMAP, WIN_KEYMAP and WIN_CH_BUFFER were all assigned to throwaway names
# while being referenced by those names; names restored from the call sites.
import os
import string
import sys


# High bit used to distinguish arrow-key codes from plain characters.
ARROW_KEY_FLAG = 1 << 8

KEYMAP = {
    "tab": ord("\t"),
    "newline": ord("\r"),
    "esc": 27,
    "up": 65 + ARROW_KEY_FLAG,
    "down": 66 + ARROW_KEY_FLAG,
    "right": 67 + ARROW_KEY_FLAG,
    "left": 68 + ARROW_KEY_FLAG,
    "mod_int": 91,
    "undefined": sys.maxsize,
    "interrupt": 3,
    "insert": 50,
    "delete": 51,
    "pg_up": 53,
    "pg_down": 54,
}

# Arrow codes are contiguous: up(65)..left(68), so these bound the range.
KEYMAP["arrow_begin"] = KEYMAP["up"]
KEYMAP["arrow_end"] = KEYMAP["left"]

if sys.platform == "win32":
    # Buffer of pending translated keystrokes on Windows.
    WIN_CH_BUFFER = []
    WIN_KEYMAP = {
        b"\xe0H": KEYMAP["up"] - ARROW_KEY_FLAG,
        b"\x00H": KEYMAP["up"] - ARROW_KEY_FLAG,
        b"\xe0P": KEYMAP["down"] - ARROW_KEY_FLAG,
        b"\x00P": KEYMAP["down"] - ARROW_KEY_FLAG,
        b"\xe0M": KEYMAP["right"] - ARROW_KEY_FLAG,
        b"\x00M": KEYMAP["right"] - ARROW_KEY_FLAG,
        b"\xe0K": KEYMAP["left"] - ARROW_KEY_FLAG,
        b"\x00K": KEYMAP["left"] - ARROW_KEY_FLAG,
    }

for i in range(10):
    KEYMAP[str(i)] = ord(str(i))


def get_raw_chars():
    """Read one raw character from stdin without waiting for Enter."""
    if os.name == "nt":
        import msvcrt

        encoding = "mbcs"
        # Flush the keyboard buffer
        while msvcrt.kbhit():
            msvcrt.getch()
        if len(WIN_CH_BUFFER) == 0:
            # Read the keystroke
            ch = msvcrt.getch()
            # If it is a prefix char, get second part
            if ch in (b"\x00", b"\xe0"):
                ch2 = ch + msvcrt.getch()
                # Translate actual Win chars to bullet char types
                try:
                    chx = chr(WIN_KEYMAP[ch2])
                    WIN_CH_BUFFER.append(chr(KEYMAP["mod_int"]))
                    WIN_CH_BUFFER.append(chx)
                    if ord(chx) in (
                        KEYMAP["insert"] - 1 << 9,
                        KEYMAP["delete"] - 1 << 9,
                        KEYMAP["pg_up"] - 1 << 9,
                        KEYMAP["pg_down"] - 1 << 9,
                    ):
                        WIN_CH_BUFFER.append(chr(126))
                    ch = chr(KEYMAP["esc"])
                except KeyError:
                    ch = ch2[1]
            else:
                ch = ch.decode(encoding)
        else:
            ch = WIN_CH_BUFFER.pop(0)
    elif os.name == "posix":
        import termios
        import tty

        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            # Always restore the terminal mode, even on error.
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch


def get_character():
    """Read one key press, translating escape sequences to KEYMAP codes."""
    char = get_raw_chars()
    if ord(char) in [KEYMAP["interrupt"], KEYMAP["newline"]]:
        return char
    elif ord(char) == KEYMAP["esc"]:
        combo = get_raw_chars()
        if ord(combo) == KEYMAP["mod_int"]:
            key = get_raw_chars()
            if ord(key) >= KEYMAP["arrow_begin"] - ARROW_KEY_FLAG and ord(key) <= KEYMAP["arrow_end"] - ARROW_KEY_FLAG:
                return chr(ord(key) + ARROW_KEY_FLAG)
            else:
                return KEYMAP["undefined"]
        else:
            return get_raw_chars()
    else:
        if char in string.printable:
            return char
        else:
            return KEYMAP["undefined"]
693
0
import json
import os
import re
import shutil
import tempfile
import unittest
from typing import Tuple

from transformers import AddedToken, BatchEncoding, ByT5Tokenizer
from transformers.utils import cached_property, is_tf_available, is_torch_available

from ...test_tokenization_common import TokenizerTesterMixin


# Tensor framework used by the integration tests below (read as FRAMEWORK later).
if is_torch_available():
    FRAMEWORK = "pt"
elif is_tf_available():
    FRAMEWORK = "tf"
else:
    FRAMEWORK = "jax"


class _a(TokenizerTesterMixin, unittest.TestCase):
    """Test suite for the byte-level ByT5 tokenizer.

    NOTE(review): reconstructed from an obfuscated dump in which every method
    was named `_snake_case`, parameters were duplicated (a SyntaxError) and the
    import was the nonexistent `ByTaTokenizer`; behavior restored from the
    visible statements, test names inferred — confirm against upstream.
    """

    tokenizer_class = ByT5Tokenizer
    test_rust_tokenizer = False

    def setUp(self):
        super().setUp()
        # ByT5 needs no vocab file, so a fresh tokenizer can be saved directly.
        tokenizer = ByT5Tokenizer()
        tokenizer.save_pretrained(self.tmpdirname)

    @cached_property
    def ta_base_tokenizer(self):
        # NOTE(review): downloads from the Hub; requires network access.
        return ByT5Tokenizer.from_pretrained("google/byt5-small")

    def get_tokenizer(self, **kwargs) -> ByT5Tokenizer:
        return self.tokenizer_class.from_pretrained(self.tmpdirname, **kwargs)

    def get_clean_sequence(self, tokenizer, with_prefix_space=False, max_length=20, min_length=5) -> Tuple[str, list]:
        """Build a (text, ids) pair whose tokens survive an encode/decode round trip."""
        toks = []
        for i in range(len(tokenizer)):
            try:
                tok = tokenizer.decode([i], clean_up_tokenization_spaces=False)
            except UnicodeDecodeError:
                pass
            toks.append((i, tok))

        # Keep only plain-ASCII tokens that encode back to exactly their own id.
        toks = list(filter(lambda t: re.match(r"^[ a-zA-Z]+$", t[1]), toks))
        toks = list(filter(lambda t: [t[0]] == tokenizer.encode(t[1], add_special_tokens=False), toks))
        if max_length is not None and len(toks) > max_length:
            toks = toks[:max_length]
        if min_length is not None and len(toks) < min_length and len(toks) > 0:
            while len(toks) < min_length:
                toks = toks + toks
        # toks_str = [t[1] for t in toks]
        toks_ids = [t[0] for t in toks]

        # Ensure consistency
        output_txt = tokenizer.decode(toks_ids, clean_up_tokenization_spaces=False)
        if " " not in output_txt and len(toks_ids) > 1:
            output_txt = (
                tokenizer.decode([toks_ids[0]], clean_up_tokenization_spaces=False)
                + " "
                + tokenizer.decode(toks_ids[1:], clean_up_tokenization_spaces=False)
            )
        if with_prefix_space:
            output_txt = " " + output_txt
        output_ids = tokenizer.encode(output_txt, add_special_tokens=False)
        return output_txt, output_ids

    def test_eos_treatment(self):
        # An explicit trailing </s> must tokenize the same as letting the
        # tokenizer append EOS itself.
        tokenizer = self.ta_base_tokenizer
        batch_with_eos_added = tokenizer(["hi</s>", "I went to the gym</s>", "</s>"])
        batch_without_eos_added = tokenizer(["hi", "I went to the gym", ""])
        self.assertListEqual(batch_with_eos_added["input_ids"], batch_without_eos_added["input_ids"])

    def test_multibytes_char(self):
        tokenizer = self.ta_base_tokenizer
        src_text = "Unicode €."
        encoded = tokenizer(src_text)
        encoded_ids = [88, 113, 108, 102, 114, 103, 104, 35, 229, 133, 175, 49, 1]
        self.assertEqual(encoded["input_ids"], encoded_ids)

        # decoding
        decoded = tokenizer.decode(encoded_ids)
        self.assertEqual(decoded, "Unicode €.</s>")

        encoded = tokenizer("e è é ê ë")
        encoded_ids = [104, 35, 198, 171, 35, 198, 172, 35, 198, 173, 35, 198, 174, 1]
        self.assertEqual(encoded["input_ids"], encoded_ids)
        # decoding
        decoded = tokenizer.decode(encoded_ids)
        self.assertEqual(decoded, "e è é ê ë</s>")

        # encode/decode, but with `encode` instead of `__call__`
        self.assertEqual(tokenizer.decode(tokenizer.encode("e è é ê ë")), "e è é ê ë</s>")

    def test_prepare_batch_integration(self):
        tokenizer = self.ta_base_tokenizer
        src_text = ["A long paragraph for summarization.", "Another paragraph for summarization."]
        # fmt: off
        expected_src_tokens = [68, 35, 111, 114, 113, 106, 35, 115, 100, 117, 100, 106, 117, 100, 115, 107, 35, 105, 114, 117, 35, 118, 120, 112, 112, 100, 117, 108, 125, 100, 119, 108, 114, 113, 49, 1, 0]
        # fmt: on
        batch = tokenizer(src_text, padding=True, return_tensors=FRAMEWORK)
        self.assertIsInstance(batch, BatchEncoding)

        if FRAMEWORK != "jax":
            result = list(batch.input_ids.numpy()[0])
        else:
            result = list(batch.input_ids.tolist()[0])

        self.assertListEqual(expected_src_tokens, result)
        self.assertEqual((2, 37), batch.input_ids.shape)
        self.assertEqual((2, 37), batch.attention_mask.shape)

    def test_empty_target_text(self):
        tokenizer = self.ta_base_tokenizer
        src_text = ["A long paragraph for summarization.", "Another paragraph for summarization."]
        batch = tokenizer(src_text, padding=True, return_tensors=FRAMEWORK)
        # check if input_ids are returned and no decoder_input_ids
        self.assertIn("input_ids", batch)
        self.assertIn("attention_mask", batch)
        self.assertNotIn("decoder_input_ids", batch)
        self.assertNotIn("decoder_attention_mask", batch)

    def test_max_length_integration(self):
        tokenizer = self.ta_base_tokenizer
        tgt_text = [
            "Summary of the text.",
            "Another summary.",
        ]
        targets = tokenizer(
            text_target=tgt_text, max_length=32, padding="max_length", truncation=True, return_tensors=FRAMEWORK
        )
        self.assertEqual(32, targets["input_ids"].shape[1])

    def test_eos_in_input(self):
        tokenizer = self.ta_base_tokenizer
        src_text = ["A long paragraph for summarization. </s>"]
        tgt_text = ["Summary of the text. </s>"]
        # fmt: off
        expected_src_tokens = [68, 35, 111, 114, 113, 106, 35, 115, 100, 117, 100, 106, 117, 100, 115, 107, 35, 105, 114, 117, 35, 118, 120, 112, 112, 100, 117, 108, 125, 100, 119, 108, 114, 113, 49, 35, 1]
        expected_tgt_tokens = [86, 120, 112, 112, 100, 117, 124, 35, 114, 105, 35, 119, 107, 104, 35, 119, 104, 123, 119, 49, 35, 1]
        # fmt: on
        batch = tokenizer(src_text, text_target=tgt_text)
        self.assertEqual(expected_src_tokens, batch["input_ids"][0])
        self.assertEqual(expected_tgt_tokens, batch["labels"][0])

    def test_save_and_load_tokenizer(self):
        # safety check: the default model_max_length must differ from the
        # sentinel (42) used below, otherwise the test proves nothing.
        tokenizers = self.get_tokenizers()
        for tokenizer in tokenizers:
            with self.subTest(f"{tokenizer.__class__.__name__}"):
                self.assertNotEqual(tokenizer.model_max_length, 42)

        # Now let's start the test
        tokenizers = self.get_tokenizers()
        for tokenizer in tokenizers:
            with self.subTest(f"{tokenizer.__class__.__name__}"):
                # Isolate this from the other tests because we save additional tokens/etc
                tmpdirname = tempfile.mkdtemp()

                sample_text = " He is very happy, UNwant\u00E9d,running"
                before_tokens = tokenizer.encode(sample_text, add_special_tokens=False)
                tokenizer.save_pretrained(tmpdirname)

                after_tokenizer = tokenizer.__class__.from_pretrained(tmpdirname)
                after_tokens = after_tokenizer.encode(sample_text, add_special_tokens=False)
                self.assertListEqual(before_tokens, after_tokens)

                shutil.rmtree(tmpdirname)

        tokenizers = self.get_tokenizers(model_max_length=42)
        for tokenizer in tokenizers:
            with self.subTest(f"{tokenizer.__class__.__name__}"):
                # Isolate this from the other tests because we save additional tokens/etc
                tmpdirname = tempfile.mkdtemp()

                sample_text = " He is very happy, UNwant\u00E9d,running"
                tokenizer.add_tokens(["bim", "bambam"])
                additional_special_tokens = tokenizer.additional_special_tokens
                additional_special_tokens.append("new_additional_special_token")
                tokenizer.add_special_tokens({"additional_special_tokens": additional_special_tokens})
                before_tokens = tokenizer.encode(sample_text, add_special_tokens=False)
                tokenizer.save_pretrained(tmpdirname)

                after_tokenizer = tokenizer.__class__.from_pretrained(tmpdirname)
                after_tokens = after_tokenizer.encode(sample_text, add_special_tokens=False)
                self.assertListEqual(before_tokens, after_tokens)
                self.assertIn("new_additional_special_token", after_tokenizer.additional_special_tokens)
                self.assertEqual(after_tokenizer.model_max_length, 42)

                tokenizer = tokenizer.__class__.from_pretrained(tmpdirname, model_max_length=43)
                self.assertEqual(tokenizer.model_max_length, 43)

                shutil.rmtree(tmpdirname)

    def test_special_tokens_initialization_with_non_empty_additional_special_tokens(self):
        tokenizer_list = []
        if self.test_slow_tokenizer:
            tokenizer_list.append((self.tokenizer_class, self.get_tokenizer()))
        if self.test_rust_tokenizer:
            tokenizer_list.append((self.rust_tokenizer_class, self.get_rust_tokenizer()))

        for tokenizer_class, tokenizer_utils in tokenizer_list:
            with tempfile.TemporaryDirectory() as tmp_dir:
                tokenizer_utils.save_pretrained(tmp_dir)

                with open(os.path.join(tmp_dir, "special_tokens_map.json"), encoding="utf-8") as json_file:
                    special_tokens_map = json.load(json_file)

                with open(os.path.join(tmp_dir, "tokenizer_config.json"), encoding="utf-8") as json_file:
                    tokenizer_config = json.load(json_file)

                added_tokens_extra_ids = [f"<extra_id_{i}>" for i in range(125)]

                special_tokens_map["additional_special_tokens"] = added_tokens_extra_ids + [
                    "an_additional_special_token"
                ]
                tokenizer_config["additional_special_tokens"] = added_tokens_extra_ids + [
                    "an_additional_special_token"
                ]

                with open(os.path.join(tmp_dir, "special_tokens_map.json"), "w", encoding="utf-8") as outfile:
                    json.dump(special_tokens_map, outfile)
                with open(os.path.join(tmp_dir, "tokenizer_config.json"), "w", encoding="utf-8") as outfile:
                    json.dump(tokenizer_config, outfile)

                # the following checks allow us to verify that our test works as expected, i.e. that the tokenizer takes
                # into account the new value of additional_special_tokens given in the "tokenizer_config.json" and
                # "special_tokens_map.json" files
                tokenizer_without_change_in_init = tokenizer_class.from_pretrained(
                    tmp_dir,
                )
                self.assertIn(
                    "an_additional_special_token", tokenizer_without_change_in_init.additional_special_tokens
                )
                # self.assertIn("an_additional_special_token",tokenizer_without_change_in_init.get_vocab()) # ByT5Tokenization no vocab
                self.assertEqual(
                    ["an_additional_special_token"],
                    tokenizer_without_change_in_init.convert_ids_to_tokens(
                        tokenizer_without_change_in_init.convert_tokens_to_ids(["an_additional_special_token"])
                    ),
                )

                # Now we test that we can change the value of additional_special_tokens in the from_pretrained
                new_added_tokens = added_tokens_extra_ids + [AddedToken("a_new_additional_special_token", lstrip=True)]
                tokenizer = tokenizer_class.from_pretrained(
                    tmp_dir,
                    additional_special_tokens=new_added_tokens,
                )

                self.assertIn("a_new_additional_special_token", tokenizer.additional_special_tokens)
                self.assertEqual(
                    ["a_new_additional_special_token"],
                    tokenizer.convert_ids_to_tokens(
                        tokenizer.convert_tokens_to_ids(["a_new_additional_special_token"])
                    ),
                )

    def test_decode_single_bytes(self):
        tokenizer_list = []
        if self.test_slow_tokenizer:
            tokenizer_list.append((self.tokenizer_class, self.get_tokenizer()))
        if self.test_rust_tokenizer:
            tokenizer_list.append((self.rust_tokenizer_class, self.get_rust_tokenizer()))

        for tokenizer_class, tokenizer_utils in tokenizer_list:
            with tempfile.TemporaryDirectory() as tmp_dir:
                tokenizer_utils.save_pretrained(tmp_dir)
                tokenizer = tokenizer_class.from_pretrained(tmp_dir)
                # A lone continuation byte is not valid UTF-8 and decodes to "".
                self.assertTrue(tokenizer.decode([255]) == "")

    # tokenizer can be instantiated without any pretrained files
    def test_pretrained_model_lists(self):
        pass

    # tokenizer does not have a vocabulary
    def test_get_vocab(self):
        pass

    # inputs cannot be pretokenized since ids depend on the whole input string
    def test_pretokenized_inputs(self):
        pass

    # no vocab, so the reversibility-over-vocab test is not applicable
    def test_conversion_reversible(self):
        pass

    def test_convert_tokens_to_string_format(self):
        # ByT5 only accepts one-character strings and special tokens as tokens.
        tokenizers = self.get_tokenizers(fast=True, do_lower_case=True)
        for tokenizer in tokenizers:
            with self.subTest(f"{tokenizer.__class__.__name__}"):
                tokens = ["t", "h", "i", "s", " ", "i", "s", " ", "a", " ", "t", "e", "x", "t", "</s>"]
                string = tokenizer.convert_tokens_to_string(tokens)
                self.assertIsInstance(string, str)

    def test_tokenizers_common_ids_setters(self):
        tokenizers = self.get_tokenizers()
        for tokenizer in tokenizers:
            with self.subTest(f"{tokenizer.__class__.__name__}"):
                attributes_list = [
                    "bos_token",
                    "eos_token",
                    "unk_token",
                    "sep_token",
                    "pad_token",
                    "cls_token",
                    "mask_token",
                ]

                token_id_to_test_setters = 0
                token_to_test_setters = tokenizer.convert_ids_to_tokens(
                    token_id_to_test_setters, skip_special_tokens=False
                )

                for attr in attributes_list:
                    setattr(tokenizer, attr + "_id", None)
                    self.assertEqual(getattr(tokenizer, attr), None)
                    self.assertEqual(getattr(tokenizer, attr + "_id"), None)

                    setattr(tokenizer, attr + "_id", token_id_to_test_setters)
                    self.assertEqual(getattr(tokenizer, attr), token_to_test_setters)
                    self.assertEqual(getattr(tokenizer, attr + "_id"), token_id_to_test_setters)

                setattr(tokenizer, "additional_special_tokens_ids", [])
                self.assertListEqual(getattr(tokenizer, "additional_special_tokens"), [])
                self.assertListEqual(getattr(tokenizer, "additional_special_tokens_ids"), [])

                setattr(tokenizer, "additional_special_tokens_ids", [token_id_to_test_setters])
                self.assertListEqual(getattr(tokenizer, "additional_special_tokens"), [token_to_test_setters])
                self.assertListEqual(getattr(tokenizer, "additional_special_tokens_ids"), [token_id_to_test_setters])
706
# Imports import numpy as np class _a : def __init__( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> List[Any]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) def _snake_case ( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Union[str, Any]: if red is not None: lowerCAmelCase : str = red if green is not None: lowerCAmelCase : Optional[int] = green if blue is not None: lowerCAmelCase : Optional[int] = blue if red_edge is not None: lowerCAmelCase : Tuple = red_edge if nir is not None: lowerCAmelCase : Union[str, Any] = nir return True def _snake_case ( self , lowercase_="" , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Optional[int]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) lowerCAmelCase : int = { """ARVI2""": self.arvaa, """CCCI""": self.ccci, """CVI""": self.cvi, """GLI""": self.gli, """NDVI""": self.ndvi, """BNDVI""": self.bndvi, """redEdgeNDVI""": self.red_edge_ndvi, """GNDVI""": self.gndvi, """GBNDVI""": self.gbndvi, """GRNDVI""": self.grndvi, """RBNDVI""": self.rbndvi, """PNDVI""": self.pndvi, """ATSAVI""": self.atsavi, """BWDRVI""": self.bwdrvi, """CIgreen""": self.ci_green, """CIrededge""": self.ci_rededge, """CI""": self.ci, """CTVI""": self.ctvi, """GDVI""": self.gdvi, """EVI""": self.evi, """GEMI""": self.gemi, """GOSAVI""": self.gosavi, """GSAVI""": self.gsavi, """Hue""": self.hue, """IVI""": self.ivi, """IPVI""": self.ipvi, """I""": self.i, """RVI""": self.rvi, """MRVI""": self.mrvi, """MSAVI""": self.m_savi, """NormG""": self.norm_g, """NormNIR""": self.norm_nir, """NormR""": self.norm_r, """NGRDI""": self.ngrdi, """RI""": self.ri, """S""": self.s, """IF""": self._if, """DVI""": self.dvi, """TVI""": self.tvi, """NDRE""": self.ndre, } try: return funcs[index]() except 
KeyError: print("""Index not in the list!""" ) return False def _snake_case ( self ) -> Dict: return -0.1_8 + (1.1_7 * ((self.nir - self.red) / (self.nir + self.red))) def _snake_case ( self ) -> Optional[Any]: return ((self.nir - self.redEdge) / (self.nir + self.redEdge)) / ( (self.nir - self.red) / (self.nir + self.red) ) def _snake_case ( self ) -> List[str]: return self.nir * (self.red / (self.green**2)) def _snake_case ( self ) -> Tuple: return (2 * self.green - self.red - self.blue) / ( 2 * self.green + self.red + self.blue ) def _snake_case ( self ) -> Optional[int]: return (self.nir - self.red) / (self.nir + self.red) def _snake_case ( self ) -> List[str]: return (self.nir - self.blue) / (self.nir + self.blue) def _snake_case ( self ) -> int: return (self.redEdge - self.red) / (self.redEdge + self.red) def _snake_case ( self ) -> Optional[Any]: return (self.nir - self.green) / (self.nir + self.green) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.blue)) / ( self.nir + (self.green + self.blue) ) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.red)) / ( self.nir + (self.green + self.red) ) def _snake_case ( self ) -> int: return (self.nir - (self.blue + self.red)) / (self.nir + (self.blue + self.red)) def _snake_case ( self ) -> List[str]: return (self.nir - (self.green + self.red + self.blue)) / ( self.nir + (self.green + self.red + self.blue) ) def _snake_case ( self , lowercase_=0.0_8 , lowercase_=1.2_2 , lowercase_=0.0_3 ) -> int: return a * ( (self.nir - a * self.red - b) / (a * self.nir + self.red - a * b + x * (1 + a**2)) ) def _snake_case ( self ) -> Optional[Any]: return (0.1 * self.nir - self.blue) / (0.1 * self.nir + self.blue) def _snake_case ( self ) -> Any: return (self.nir / self.green) - 1 def _snake_case ( self ) -> List[Any]: return (self.nir / self.redEdge) - 1 def _snake_case ( self ) -> str: return (self.red - self.blue) / self.red def _snake_case ( self ) -> Optional[int]: lowerCAmelCase 
: Dict = self.ndvi() return ((ndvi + 0.5) / (abs(ndvi + 0.5 ))) * (abs(ndvi + 0.5 ) ** (1 / 2)) def _snake_case ( self ) -> Optional[Any]: return self.nir - self.green def _snake_case ( self ) -> int: return 2.5 * ( (self.nir - self.red) / (self.nir + 6 * self.red - 7.5 * self.blue + 1) ) def _snake_case ( self ) -> Optional[Any]: lowerCAmelCase : Tuple = (2 * (self.nir**2 - self.red**2) + 1.5 * self.nir + 0.5 * self.red) / ( self.nir + self.red + 0.5 ) return n * (1 - 0.2_5 * n) - (self.red - 0.1_2_5) / (1 - self.red) def _snake_case ( self , lowercase_=0.1_6 ) -> Optional[int]: return (self.nir - self.green) / (self.nir + self.green + y) def _snake_case ( self , lowercase_=0.5 ) -> List[str]: return ((self.nir - self.green) / (self.nir + self.green + n)) * (1 + n) def _snake_case ( self ) -> Any: return np.arctan( ((2 * self.red - self.green - self.blue) / 3_0.5) * (self.green - self.blue) ) def _snake_case ( self , lowercase_=None , lowercase_=None ) -> List[Any]: return (self.nir - b) / (a * self.red) def _snake_case ( self ) -> Any: return (self.nir / ((self.nir + self.red) / 2)) * (self.ndvi() + 1) def _snake_case ( self ) -> str: return (self.red + self.green + self.blue) / 3_0.5 def _snake_case ( self ) -> Union[str, Any]: return self.nir / self.red def _snake_case ( self ) -> Tuple: return (self.rvi() - 1) / (self.rvi() + 1) def _snake_case ( self ) -> Dict: return ( (2 * self.nir + 1) - ((2 * self.nir + 1) ** 2 - 8 * (self.nir - self.red)) ** (1 / 2) ) / 2 def _snake_case ( self ) -> List[Any]: return self.green / (self.nir + self.red + self.green) def _snake_case ( self ) -> int: return self.nir / (self.nir + self.red + self.green) def _snake_case ( self ) -> Dict: return self.red / (self.nir + self.red + self.green) def _snake_case ( self ) -> List[Any]: return (self.green - self.red) / (self.green + self.red) def _snake_case ( self ) -> Optional[int]: return (self.red - self.green) / (self.red + self.green) def _snake_case ( self ) -> Tuple: 
lowerCAmelCase : Any = np.max([np.max(self.red ), np.max(self.green ), np.max(self.blue )] ) lowerCAmelCase : Dict = np.min([np.min(self.red ), np.min(self.green ), np.min(self.blue )] ) return (max_value - min_value) / max_value def _snake_case ( self ) -> int: return (2 * self.red - self.green - self.blue) / (self.green - self.blue) def _snake_case ( self ) -> List[str]: return self.nir / self.red def _snake_case ( self ) -> int: return (self.ndvi() + 0.5) ** (1 / 2) def _snake_case ( self ) -> str: return (self.nir - self.redEdge) / (self.nir + self.redEdge)
693
0
from typing import Dict, List, Optional, Union

import numpy as np

from ...image_processing_utils import BaseImageProcessor, BatchFeature, get_size_dict
from ...image_transforms import (
    center_crop,
    convert_to_rgb,
    get_resize_output_image_size,
    normalize,
    rescale,
    resize,
    to_channel_dimension_format,
)
from ...image_utils import (
    OPENAI_CLIP_MEAN,
    OPENAI_CLIP_STD,
    ChannelDimension,
    ImageInput,
    PILImageResampling,
    make_list_of_images,
    to_numpy_array,
    valid_images,
)
from ...utils import TensorType, is_vision_available, logging


logger = logging.get_logger(__name__)


if is_vision_available():
    import PIL


class _a(BaseImageProcessor):
    """CLIP-style image processor: resize -> center-crop -> rescale -> normalize.

    NOTE(review): reconstructed from an obfuscated dump whose base class and
    every argument were undefined placeholders and whose five methods shared
    one mangled name; the `default_to_square` flags below follow the standard
    CLIP convention (False for the shortest-edge resize, True for the crop) —
    confirm against upstream.
    """

    model_input_names = ["pixel_values"]

    def __init__(
        self,
        do_resize: bool = True,
        size: Optional[Dict[str, int]] = None,
        resample: "PILImageResampling" = PILImageResampling.BICUBIC,
        do_center_crop: bool = True,
        crop_size: Optional[Dict[str, int]] = None,
        do_rescale: bool = True,
        rescale_factor: Union[int, float] = 1 / 255,
        do_normalize: bool = True,
        image_mean: Optional[Union[float, List[float]]] = None,
        image_std: Optional[Union[float, List[float]]] = None,
        do_convert_rgb: bool = True,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        # Defaults: resize shortest edge to 224, then crop to 224x224.
        size = size if size is not None else {"shortest_edge": 224}
        size = get_size_dict(size, default_to_square=False)
        crop_size = crop_size if crop_size is not None else {"height": 224, "width": 224}
        crop_size = get_size_dict(crop_size, default_to_square=True, param_name="crop_size")

        self.do_resize = do_resize
        self.size = size
        self.resample = resample
        self.do_center_crop = do_center_crop
        self.crop_size = crop_size
        self.do_rescale = do_rescale
        self.rescale_factor = rescale_factor
        self.do_normalize = do_normalize
        self.image_mean = image_mean if image_mean is not None else OPENAI_CLIP_MEAN
        self.image_std = image_std if image_std is not None else OPENAI_CLIP_STD
        self.do_convert_rgb = do_convert_rgb

    def resize(
        self,
        image: np.ndarray,
        size: Dict[str, int],
        resample: "PILImageResampling" = PILImageResampling.BICUBIC,
        data_format: Optional[Union[str, ChannelDimension]] = None,
        **kwargs,
    ) -> np.ndarray:
        """Resize so the shortest edge equals ``size["shortest_edge"]``, keeping aspect ratio."""
        size = get_size_dict(size, default_to_square=False)
        if "shortest_edge" not in size:
            raise ValueError(f"The `size` parameter must contain the key `shortest_edge`. Got {size.keys()}")
        output_size = get_resize_output_image_size(image, size=size["shortest_edge"], default_to_square=False)
        # `resize` here is the module-level transform, not this method.
        return resize(image, size=output_size, resample=resample, data_format=data_format, **kwargs)

    def center_crop(
        self,
        image: np.ndarray,
        size: Dict[str, int],
        data_format: Optional[Union[str, ChannelDimension]] = None,
        **kwargs,
    ) -> np.ndarray:
        """Center-crop to ``(size["height"], size["width"])``."""
        size = get_size_dict(size)
        if "height" not in size or "width" not in size:
            raise ValueError(f"The `size` parameter must contain the keys (height, width). Got {size.keys()}")
        return center_crop(image, size=(size["height"], size["width"]), data_format=data_format, **kwargs)

    def rescale(
        self,
        image: np.ndarray,
        scale: Union[int, float],
        data_format: Optional[Union[str, ChannelDimension]] = None,
        **kwargs,
    ) -> np.ndarray:
        """Multiply pixel values by ``scale`` (typically 1/255)."""
        return rescale(image, scale=scale, data_format=data_format, **kwargs)

    def normalize(
        self,
        image: np.ndarray,
        mean: Union[float, List[float]],
        std: Union[float, List[float]],
        data_format: Optional[Union[str, ChannelDimension]] = None,
        **kwargs,
    ) -> np.ndarray:
        """Normalize per channel: ``(image - mean) / std``."""
        return normalize(image, mean=mean, std=std, data_format=data_format, **kwargs)

    def preprocess(
        self,
        images: ImageInput,
        do_resize: Optional[bool] = None,
        size: Optional[Dict[str, int]] = None,
        resample: "PILImageResampling" = None,
        do_center_crop: Optional[bool] = None,
        crop_size: Optional[Dict[str, int]] = None,
        do_rescale: Optional[bool] = None,
        rescale_factor: Optional[float] = None,
        do_normalize: Optional[bool] = None,
        image_mean: Optional[Union[float, List[float]]] = None,
        image_std: Optional[Union[float, List[float]]] = None,
        do_convert_rgb: Optional[bool] = None,
        return_tensors: Optional[Union[str, TensorType]] = None,
        data_format: Optional[ChannelDimension] = ChannelDimension.FIRST,
        **kwargs,
    ) -> BatchFeature:
        """Run the full CLIP pipeline on one image or a batch.

        Every argument defaults to the value stored on the processor; the
        result is a BatchFeature holding ``pixel_values``.
        """
        do_resize = do_resize if do_resize is not None else self.do_resize
        size = size if size is not None else self.size
        size = get_size_dict(size, param_name="size", default_to_square=False)
        resample = resample if resample is not None else self.resample
        do_center_crop = do_center_crop if do_center_crop is not None else self.do_center_crop
        crop_size = crop_size if crop_size is not None else self.crop_size
        crop_size = get_size_dict(crop_size, param_name="crop_size", default_to_square=True)
        do_rescale = do_rescale if do_rescale is not None else self.do_rescale
        rescale_factor = rescale_factor if rescale_factor is not None else self.rescale_factor
        do_normalize = do_normalize if do_normalize is not None else self.do_normalize
        image_mean = image_mean if image_mean is not None else self.image_mean
        image_std = image_std if image_std is not None else self.image_std
        do_convert_rgb = do_convert_rgb if do_convert_rgb is not None else self.do_convert_rgb

        images = make_list_of_images(images)

        if not valid_images(images):
            raise ValueError(
                "Invalid image type. Must be of type PIL.Image.Image, numpy.ndarray, "
                "torch.Tensor, tf.Tensor or jax.ndarray."
            )

        if do_resize and size is None:
            raise ValueError("Size must be specified if do_resize is True.")
        if do_center_crop and crop_size is None:
            raise ValueError("Crop size must be specified if do_center_crop is True.")
        if do_rescale and rescale_factor is None:
            raise ValueError("Rescale factor must be specified if do_rescale is True.")
        if do_normalize and (image_mean is None or image_std is None):
            raise ValueError("Image mean and std must be specified if do_normalize is True.")

        # PIL RGBA images are converted to RGB
        if do_convert_rgb:
            images = [convert_to_rgb(image) for image in images]

        # All transformations expect numpy arrays.
        images = [to_numpy_array(image) for image in images]

        if do_resize:
            images = [self.resize(image=image, size=size, resample=resample) for image in images]

        if do_center_crop:
            images = [self.center_crop(image=image, size=crop_size) for image in images]

        if do_rescale:
            images = [self.rescale(image=image, scale=rescale_factor) for image in images]

        if do_normalize:
            images = [self.normalize(image=image, mean=image_mean, std=image_std) for image in images]

        images = [to_channel_dimension_format(image, data_format) for image in images]

        data = {"pixel_values": images}
        return BatchFeature(data=data, tensor_type=return_tensors)
707
import argparse
import json
import math
import os
import time
import traceback
import zipfile
from collections import Counter

import requests


def get_job_links(workflow_run_id, token=None):
    """Return {job name: job html_url} for every job of a GitHub Actions workflow run.

    Paginates through the Actions API (100 jobs per page). On any error the
    partial failure is printed and an empty dict is returned.
    """
    headers = None
    if token is not None:
        headers = {"Accept": "application/vnd.github+json", "Authorization": f"Bearer {token}"}

    url = f"https://api.github.com/repos/huggingface/transformers/actions/runs/{workflow_run_id}/jobs?per_page=100"
    result = requests.get(url, headers=headers).json()
    job_links = {}

    try:
        job_links.update({job["name"]: job["html_url"] for job in result["jobs"]})
        # First page already fetched; compute how many extra pages remain.
        pages_to_iterate_over = math.ceil((result["total_count"] - 100) / 100)

        for i in range(pages_to_iterate_over):
            result = requests.get(url + f"&page={i + 2}", headers=headers).json()
            job_links.update({job["name"]: job["html_url"] for job in result["jobs"]})

        return job_links
    except Exception:
        print(f"Unknown error, could not fetch links:\n{traceback.format_exc()}")

    return {}


def get_artifacts_links(workflow_run_id, token=None):
    """Return {artifact name: archive_download_url} for a workflow run.

    NOTE: the original referenced an undefined `worflow_run_id` (typo) in the
    URL f-string; fixed to use the actual parameter.
    """
    headers = None
    if token is not None:
        headers = {"Accept": "application/vnd.github+json", "Authorization": f"Bearer {token}"}

    url = f"https://api.github.com/repos/huggingface/transformers/actions/runs/{workflow_run_id}/artifacts?per_page=100"
    result = requests.get(url, headers=headers).json()
    artifacts = {}

    try:
        artifacts.update({artifact["name"]: artifact["archive_download_url"] for artifact in result["artifacts"]})
        pages_to_iterate_over = math.ceil((result["total_count"] - 100) / 100)

        for i in range(pages_to_iterate_over):
            result = requests.get(url + f"&page={i + 2}", headers=headers).json()
            artifacts.update({artifact["name"]: artifact["archive_download_url"] for artifact in result["artifacts"]})

        return artifacts
    except Exception:
        print(f"Unknown error, could not fetch links:\n{traceback.format_exc()}")

    return {}


def download_artifact(artifact_name, artifact_url, output_dir, token):
    """Download a workflow artifact zip into `output_dir/<artifact_name>.zip`.

    GitHub returns a 302 with the real download URL in the `Location` header,
    so the first request must not follow redirects.
    """
    headers = None
    if token is not None:
        headers = {"Accept": "application/vnd.github+json", "Authorization": f"Bearer {token}"}

    result = requests.get(artifact_url, headers=headers, allow_redirects=False)
    download_url = result.headers["Location"]
    response = requests.get(download_url, allow_redirects=True)
    file_path = os.path.join(output_dir, f"{artifact_name}.zip")
    with open(file_path, "wb") as fp:
        fp.write(response.content)


def get_errors_from_single_artifact(artifact_zip_path, job_links=None):
    """Extract (error line, error, failed test, job link) rows from one artifact zip.

    Reads `failures_line.txt`, `summary_short.txt` and `job_name.txt` inside
    the zip. Raises ValueError when errors and failed tests don't pair up.
    """
    errors = []
    failed_tests = []
    job_name = None

    with zipfile.ZipFile(artifact_zip_path) as z:
        for filename in z.namelist():
            if not os.path.isdir(filename):
                # read the file
                if filename in ["failures_line.txt", "summary_short.txt", "job_name.txt"]:
                    with z.open(filename) as f:
                        for line in f:
                            line = line.decode("UTF-8").strip()
                            if filename == "failures_line.txt":
                                try:
                                    # `error_line` is the place where `error` occurs
                                    error_line = line[: line.index(": ")]
                                    error = line[line.index(": ") + len(": ") :]
                                    errors.append([error_line, error])
                                except Exception:
                                    # skip un-related lines
                                    pass
                            elif filename == "summary_short.txt" and line.startswith("FAILED "):
                                # `test` is the test method that failed
                                test = line[len("FAILED ") :]
                                failed_tests.append(test)
                            elif filename == "job_name.txt":
                                job_name = line

    if len(errors) != len(failed_tests):
        raise ValueError(
            f"`errors` and `failed_tests` should have the same number of elements. Got {len(errors)} for `errors` "
            f"and {len(failed_tests)} for `failed_tests` instead. The test reports in {artifact_zip_path} have some"
            " problem."
        )

    job_link = None
    if job_name and job_links:
        job_link = job_links.get(job_name, None)

    # A list with elements of the form (line of error, error, failed test)
    result = [x + [y] + [job_link] for x, y in zip(errors, failed_tests)]
    return result


def get_all_errors(artifact_dir, job_links=None):
    """Aggregate the error rows from every `*.zip` artifact in `artifact_dir`."""
    errors = []
    paths = [os.path.join(artifact_dir, p) for p in os.listdir(artifact_dir) if p.endswith(".zip")]
    for p in paths:
        errors.extend(get_errors_from_single_artifact(p, job_links=job_links))
    return errors


def reduce_by_error(logs, error_filter=None):
    """Group the rows by error message, sorted by descending occurrence count."""
    counter = Counter()
    counter.update([x[1] for x in logs])
    counts = counter.most_common()
    r = {}
    for error, count in counts:
        if error_filter is None or error not in error_filter:
            r[error] = {"count": count, "failed_tests": [(x[2], x[0]) for x in logs if x[1] == error]}

    r = dict(sorted(r.items(), key=lambda item: item[1]["count"], reverse=True))
    return r


def get_model(test):
    """Return the model name for a test path of the form `tests/models/<name>/...`, else None."""
    test = test.split("::")[0]
    if test.startswith("tests/models/"):
        test = test.split("/")[2]
    else:
        test = None
    return test


def reduce_by_model(logs, error_filter=None):
    """Group error counts per model, sorted by descending total error count."""
    logs = [(x[0], x[1], get_model(x[2])) for x in logs]
    logs = [x for x in logs if x[2] is not None]
    tests = {x[2] for x in logs}

    r = {}
    for test in tests:
        counter = Counter()
        # count by errors in `test`
        counter.update([x[1] for x in logs if x[2] == test])
        counts = counter.most_common()
        error_counts = {error: count for error, count in counts if (error_filter is None or error not in error_filter)}
        n_errors = sum(error_counts.values())
        if n_errors > 0:
            r[test] = {"count": n_errors, "errors": error_counts}

    r = dict(sorted(r.items(), key=lambda item: item[1]["count"], reverse=True))
    return r


def make_github_table(reduced_by_error):
    """Render the per-error summary as a GitHub-flavored markdown table."""
    header = "| no. | error | status |"
    sep = "|-:|:-|:-|"
    lines = [header, sep]
    for error in reduced_by_error:
        count = reduced_by_error[error]["count"]
        line = f"| {count} | {error[:100]} | |"
        lines.append(line)
    return "\n".join(lines)


def make_github_table_per_model(reduced_by_model):
    """Render the per-model summary as a GitHub-flavored markdown table."""
    header = "| model | no. of errors | major error | count |"
    sep = "|-:|-:|-:|-:|"
    lines = [header, sep]
    for model in reduced_by_model:
        count = reduced_by_model[model]["count"]
        error, _count = list(reduced_by_model[model]["errors"].items())[0]
        line = f"| {model} | {count} | {error[:60]} | {_count} |"
        lines.append(line)
    return "\n".join(lines)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument("--workflow_run_id", type=str, required=True, help="A GitHub Actions workflow run id.")
    parser.add_argument(
        "--output_dir",
        type=str,
        required=True,
        help="Where to store the downloaded artifacts and other result files.",
    )
    parser.add_argument("--token", default=None, type=str, help="A token that has actions:read permission.")
    args = parser.parse_args()

    os.makedirs(args.output_dir, exist_ok=True)

    _job_links = get_job_links(args.workflow_run_id, token=args.token)
    job_links = {}
    # To deal with `workflow_call` event, where a job name is the combination of the job names in the caller and callee.
    # For example, `PyTorch 1.11 / Model tests (models/albert, single-gpu)`.
    if _job_links:
        for k, v in _job_links.items():
            # This is how GitHub actions combine job names.
            if " / " in k:
                index = k.find(" / ")
                k = k[index + len(" / ") :]
            job_links[k] = v
    with open(os.path.join(args.output_dir, "job_links.json"), "w", encoding="UTF-8") as fp:
        json.dump(job_links, fp, ensure_ascii=False, indent=4)

    artifacts = get_artifacts_links(args.workflow_run_id, token=args.token)
    with open(os.path.join(args.output_dir, "artifacts.json"), "w", encoding="UTF-8") as fp:
        json.dump(artifacts, fp, ensure_ascii=False, indent=4)

    for idx, (name, url) in enumerate(artifacts.items()):
        download_artifact(name, url, args.output_dir, args.token)
        # Be gentle to GitHub
        time.sleep(1)

    errors = get_all_errors(args.output_dir, job_links=job_links)

    # `e[1]` is the error
    counter = Counter()
    counter.update([e[1] for e in errors])

    # print the top 30 most common test errors
    most_common = counter.most_common(30)
    for item in most_common:
        print(item)

    with open(os.path.join(args.output_dir, "errors.json"), "w", encoding="UTF-8") as fp:
        json.dump(errors, fp, ensure_ascii=False, indent=4)

    reduced_by_error = reduce_by_error(errors)
    reduced_by_model = reduce_by_model(errors)

    s1 = make_github_table(reduced_by_error)
    s2 = make_github_table_per_model(reduced_by_model)

    with open(os.path.join(args.output_dir, "reduced_by_error.txt"), "w", encoding="UTF-8") as fp:
        fp.write(s1)
    with open(os.path.join(args.output_dir, "reduced_by_model.txt"), "w", encoding="UTF-8") as fp:
        fp.write(s2)
693
0
'''simple docstring''' from diffusers.utils.testing_utils import require_onnxruntime @require_onnxruntime class _a : pass
708
from typing import TYPE_CHECKING # rely on isort to merge the imports from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available lowerCAmelCase : Optional[int] ={ 'configuration_autoformer': [ 'AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'AutoformerConfig', ], } try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : Dict =[ 'AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST', 'AutoformerForPrediction', 'AutoformerModel', 'AutoformerPreTrainedModel', ] if TYPE_CHECKING: from .configuration_autoformer import ( AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, AutoformerConfig, ) try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_autoformer import ( AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST, AutoformerForPrediction, AutoformerModel, AutoformerPreTrainedModel, ) else: import sys lowerCAmelCase : Any =_LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)
693
0
'''simple docstring''' import fire from utils import calculate_rouge, save_json def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__=None ,**SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : List[str] = [x.strip() for x in open(_lowercase ).readlines()] lowerCAmelCase : Any = [x.strip() for x in open(_lowercase ).readlines()][: len(_lowercase )] lowerCAmelCase : Dict = calculate_rouge(_lowercase ,_lowercase ,**_lowercase ) if save_path is not None: save_json(_lowercase ,_lowercase ,indent=_lowercase ) return metrics # these print nicely if __name__ == "__main__": fire.Fire(calculate_rouge_path)
709
import copy
from collections import OrderedDict
from typing import Dict, Mapping

from packaging import version

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ..auto import CONFIG_MAPPING


# NOTE: the original bound this logger to a throwaway name, leaving `logger`
# undefined inside the class body.
logger = logging.get_logger(__name__)

DETR_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "facebook/detr-resnet-50": "https://huggingface.co/facebook/detr-resnet-50/resolve/main/config.json",
    # See all DETR models at https://huggingface.co/models?filter=detr
}


class _a(snake_case_):
    """Configuration class for DETR models.

    Holds the model hyper-parameters (backbone choice, transformer sizes,
    Hungarian-matcher costs and loss coefficients). The class-attribute names
    below (`model_type`, `keys_to_ignore_at_inference`, `attribute_map`) are
    part of the PretrainedConfig contract and were restored from obfuscated
    placeholders.
    """

    model_type = "detr"
    keys_to_ignore_at_inference = ["past_key_values"]
    attribute_map = {
        "hidden_size": "d_model",
        "num_attention_heads": "encoder_attention_heads",
    }

    def __init__(
        self,
        use_timm_backbone=True,
        backbone_config=None,
        num_channels=3,
        num_queries=100,
        encoder_layers=6,
        encoder_ffn_dim=2048,
        encoder_attention_heads=8,
        decoder_layers=6,
        decoder_ffn_dim=2048,
        decoder_attention_heads=8,
        encoder_layerdrop=0.0,
        decoder_layerdrop=0.0,
        is_encoder_decoder=True,
        activation_function="relu",
        d_model=256,
        dropout=0.1,
        attention_dropout=0.0,
        activation_dropout=0.0,
        init_std=0.02,
        init_xavier_std=1.0,
        auxiliary_loss=False,
        position_embedding_type="sine",
        backbone="resnet50",
        use_pretrained_backbone=True,
        dilation=False,
        class_cost=1,
        bbox_cost=5,
        giou_cost=2,
        mask_loss_coefficient=1,
        dice_loss_coefficient=1,
        bbox_loss_coefficient=5,
        giou_loss_coefficient=2,
        eos_coefficient=0.1,
        **kwargs,
    ):
        # `backbone_config` (a transformers backbone) and `use_timm_backbone`
        # are mutually exclusive ways of selecting the CNN backbone.
        if backbone_config is not None and use_timm_backbone:
            raise ValueError("You can't specify both `backbone_config` and `use_timm_backbone`.")

        if not use_timm_backbone:
            if backbone_config is None:
                logger.info("`backbone_config` is `None`. Initializing the config with the default `ResNet` backbone.")
                backbone_config = CONFIG_MAPPING["resnet"](out_features=["stage4"])
            elif isinstance(backbone_config, dict):
                # Rebuild the nested config object from its serialized dict.
                backbone_model_type = backbone_config.get("model_type")
                config_class = CONFIG_MAPPING[backbone_model_type]
                backbone_config = config_class.from_dict(backbone_config)
            # set timm attributes to None
            dilation, backbone, use_pretrained_backbone = None, None, None

        self.use_timm_backbone = use_timm_backbone
        self.backbone_config = backbone_config
        self.num_channels = num_channels
        self.num_queries = num_queries
        self.d_model = d_model
        self.encoder_ffn_dim = encoder_ffn_dim
        self.encoder_layers = encoder_layers
        self.encoder_attention_heads = encoder_attention_heads
        self.decoder_ffn_dim = decoder_ffn_dim
        self.decoder_layers = decoder_layers
        self.decoder_attention_heads = decoder_attention_heads
        self.dropout = dropout
        self.attention_dropout = attention_dropout
        self.activation_dropout = activation_dropout
        self.activation_function = activation_function
        self.init_std = init_std
        self.init_xavier_std = init_xavier_std
        self.encoder_layerdrop = encoder_layerdrop
        self.decoder_layerdrop = decoder_layerdrop
        self.num_hidden_layers = encoder_layers
        self.auxiliary_loss = auxiliary_loss
        self.position_embedding_type = position_embedding_type
        self.backbone = backbone
        self.use_pretrained_backbone = use_pretrained_backbone
        self.dilation = dilation
        # Hungarian matcher
        self.class_cost = class_cost
        self.bbox_cost = bbox_cost
        self.giou_cost = giou_cost
        # Loss coefficients
        self.mask_loss_coefficient = mask_loss_coefficient
        self.dice_loss_coefficient = dice_loss_coefficient
        self.bbox_loss_coefficient = bbox_loss_coefficient
        self.giou_loss_coefficient = giou_loss_coefficient
        self.eos_coefficient = eos_coefficient
        super().__init__(is_encoder_decoder=is_encoder_decoder, **kwargs)

    @property
    def num_attention_heads(self) -> int:
        # Aliased via `attribute_map` above.
        return self.encoder_attention_heads

    @property
    def hidden_size(self) -> int:
        # Aliased via `attribute_map` above.
        return self.d_model

    @classmethod
    def from_backbone_config(cls, backbone_config, **kwargs):
        """Instantiate a config from a pre-built backbone configuration."""
        return cls(backbone_config=backbone_config, **kwargs)

    def to_dict(self) -> Dict[str, any]:
        """Serialize to a plain dict, expanding the nested backbone config.

        NOTE: the original referenced `output` without ever assigning it; the
        deepcopy result is now bound to `output`.
        """
        output = copy.deepcopy(self.__dict__)
        if output["backbone_config"] is not None:
            output["backbone_config"] = self.backbone_config.to_dict()
        output["model_type"] = self.__class__.model_type
        return output


# NOTE(review): the original gave this class the same name `_a` as the config
# above, shadowing it at module level — presumably it was a distinct
# `DetrOnnxConfig`; the name is kept, but this should be confirmed upstream.
class _a(snake_case_):
    """ONNX export configuration for DETR (inputs, tolerance, opset)."""

    torch_onnx_minimum_version = version.parse("1.11")

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        # Dynamic axes for the exported graph.
        return OrderedDict(
            [
                ("pixel_values", {0: "batch", 1: "num_channels", 2: "height", 3: "width"}),
                ("pixel_mask", {0: "batch"}),
            ]
        )

    @property
    def atol_for_validation(self) -> float:
        return 1e-5

    @property
    def default_onnx_opset(self) -> int:
        return 12
693
0
lowerCAmelCase : str ={ 0: '''0''', 1: '''1''', 2: '''2''', 3: '''3''', 4: '''4''', 5: '''5''', 6: '''6''', 7: '''7''', 8: '''8''', 9: '''9''', 10: '''a''', 11: '''b''', 12: '''c''', 13: '''d''', 14: '''e''', 15: '''f''', } def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' assert type(UpperCAmelCase__ ) in (int, float) and decimal == int(UpperCAmelCase__ ) lowerCAmelCase : Union[str, Any] = int(UpperCAmelCase__ ) lowerCAmelCase : List[Any] = """""" lowerCAmelCase : str = False if decimal < 0: lowerCAmelCase : Tuple = True decimal *= -1 while decimal > 0: lowerCAmelCase , lowerCAmelCase : str = divmod(UpperCAmelCase__ ,1_6 ) lowerCAmelCase : Dict = values[remainder] + hexadecimal lowerCAmelCase : int = """0x""" + hexadecimal if negative: lowerCAmelCase : Tuple = """-""" + hexadecimal return hexadecimal if __name__ == "__main__": import doctest doctest.testmod()
710
import json
import logging
import os
import sys
from pathlib import Path

import finetune_rag

from transformers.file_utils import is_apex_available
from transformers.testing_utils import (
    TestCasePlus,
    execute_subprocess_async,
    require_ray,
    require_torch_gpu,
    require_torch_multi_gpu,
)

logging.basicConfig(level=logging.DEBUG)

logger = logging.getLogger()
stream_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stream_handler)


class _a(TestCasePlus):
    """End-to-end smoke tests for the RAG fine-tuning example script.

    Each test writes a tiny dummy dataset, launches `finetune_rag.py` in a
    subprocess, and checks the exact-match metric in the resulting
    metrics.json. NOTE: the original collapsed every method name to
    `_snake_case` and referenced undefined locals (`gpus`, `testargs`, ...);
    names are restored so the methods can actually call each other.
    """

    def _create_dummy_data(self, data_dir):
        # 12 train / 2 val / 2 test copies of a single QA pair.
        os.makedirs(data_dir, exist_ok=True)
        contents = {"source": "What is love ?", "target": "life"}
        n_lines = {"train": 12, "val": 2, "test": 2}
        for split in ["train", "test", "val"]:
            for field in ["source", "target"]:
                content = "\n".join([contents[field]] * n_lines[split])
                with open(os.path.join(data_dir, f"{split}.{field}"), "w") as f:
                    f.write(content)

    def _run_finetune(self, gpus, distributed_retriever="pytorch"):
        tmp_dir = self.get_auto_remove_tmp_dir()
        output_dir = os.path.join(tmp_dir, "output")
        data_dir = os.path.join(tmp_dir, "data")
        self._create_dummy_data(data_dir=data_dir)

        # The whitespace layout is irrelevant because of the trailing .split().
        testargs = f"""
            --data_dir {data_dir}
            --output_dir {output_dir}
            --model_name_or_path facebook/rag-sequence-base
            --model_type rag_sequence
            --do_train
            --do_predict
            --n_val -1
            --val_check_interval 1.0
            --train_batch_size 2
            --eval_batch_size 1
            --max_source_length 25
            --max_target_length 25
            --val_max_target_length 25
            --test_max_target_length 25
            --label_smoothing 0.1
            --dropout 0.1
            --attention_dropout 0.1
            --weight_decay 0.001
            --adam_epsilon 1e-08
            --max_grad_norm 0.1
            --lr_scheduler polynomial
            --learning_rate 3e-04
            --num_train_epochs 1
            --warmup_steps 4
            --gradient_accumulation_steps 1
            --distributed-port 8787
            --use_dummy_dataset 1
            --distributed_retriever {distributed_retriever}
        """.split()

        if gpus > 0:
            testargs.append(f"--gpus={gpus}")
            if is_apex_available():
                testargs.append("--fp16")
        else:
            testargs.append("--gpus=0")
            testargs.append("--distributed_backend=ddp_cpu")
            testargs.append("--num_processes=2")

        cmd = [sys.executable, str(Path(finetune_rag.__file__).resolve())] + testargs
        execute_subprocess_async(cmd, env=self.get_env())

        metrics_save_path = os.path.join(output_dir, "metrics.json")
        with open(metrics_save_path) as f:
            result = json.load(f)
        return result

    @require_torch_gpu
    def test_finetune_gpu(self):
        result = self._run_finetune(gpus=1)
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_multi_gpu
    def test_finetune_multigpu(self):
        result = self._run_finetune(gpus=2)
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_gpu
    @require_ray
    def test_finetune_gpu_ray_retrieval(self):
        result = self._run_finetune(gpus=1, distributed_retriever="ray")
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_multi_gpu
    @require_ray
    def test_finetune_multigpu_ray_retrieval(self):
        # Kept as written in the source: launches with a single GPU even
        # under the multi-GPU requirement decorator.
        result = self._run_finetune(gpus=1, distributed_retriever="ray")
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)
693
0
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ = 1_0_0 ): '''simple docstring''' lowerCAmelCase : str = set() lowerCAmelCase : Union[str, Any] = 0 lowerCAmelCase : Any = n + 1 # maximum limit for a in range(2 ,_snake_case ): for b in range(2 ,_snake_case ): lowerCAmelCase : Any = a**b # calculates the current power collect_powers.add(_snake_case ) # adds the result to the set return len(_snake_case ) if __name__ == "__main__": print('Number of terms ', solution(int(str(input()).strip())))
711
from ...configuration_utils import PretrainedConfig
from ...utils import logging


# NOTE: the original bound the logger to a throwaway name, leaving `logger`
# undefined inside the class.
logger = logging.get_logger(__name__)

TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "transfo-xl-wt103": "https://huggingface.co/transfo-xl-wt103/resolve/main/config.json",
}


class _a(PretrainedConfig):
    """Configuration for Transformer-XL models.

    NOTE: the original declared every parameter as the same name `lowercase_`
    (a duplicate-argument SyntaxError) while the body referenced the real
    names; the canonical parameter names are restored, mapped positionally
    from the original defaults. The class-attribute names (`model_type`,
    `keys_to_ignore_at_inference`, `attribute_map`) are part of the
    PretrainedConfig contract and were likewise restored.
    """

    model_type = "transfo-xl"
    keys_to_ignore_at_inference = ["mems"]
    attribute_map = {
        "n_token": "vocab_size",
        "hidden_size": "d_model",
        "num_attention_heads": "n_head",
        "num_hidden_layers": "n_layer",
    }

    def __init__(
        self,
        vocab_size=267735,
        cutoffs=[20000, 40000, 200000],
        d_model=1024,
        d_embed=1024,
        n_head=16,
        d_head=64,
        d_inner=4096,
        div_val=4,
        pre_lnorm=False,
        n_layer=18,
        mem_len=1600,
        clamp_len=1000,
        same_length=True,
        proj_share_all_but_first=True,
        attn_type=0,
        sample_softmax=-1,
        adaptive=True,
        dropout=0.1,
        dropatt=0.0,
        untie_r=True,
        init="normal",
        init_range=0.01,
        proj_init_std=0.01,
        init_std=0.02,
        layer_norm_epsilon=1e-5,
        eos_token_id=0,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.cutoffs = []
        self.cutoffs.extend(cutoffs)
        # Tie all adaptive-softmax projections except the first cluster when
        # requested; otherwise tie none.
        if proj_share_all_but_first:
            self.tie_projs = [False] + [True] * len(self.cutoffs)
        else:
            self.tie_projs = [False] + [False] * len(self.cutoffs)
        self.d_model = d_model
        self.d_embed = d_embed
        self.d_head = d_head
        self.d_inner = d_inner
        self.div_val = div_val
        self.pre_lnorm = pre_lnorm
        self.n_layer = n_layer
        self.n_head = n_head
        self.mem_len = mem_len
        self.same_length = same_length
        self.attn_type = attn_type
        self.clamp_len = clamp_len
        self.sample_softmax = sample_softmax
        self.adaptive = adaptive
        self.dropout = dropout
        self.dropatt = dropatt
        self.untie_r = untie_r
        self.init = init
        self.init_range = init_range
        self.proj_init_std = proj_init_std
        self.init_std = init_std
        self.layer_norm_epsilon = layer_norm_epsilon
        super().__init__(eos_token_id=eos_token_id, **kwargs)

    @property
    def max_position_embeddings(self):
        # Message copied from Transformer-XL documentation
        logger.info(f"The model {self.model_type} is one of the few models that has no sequence length limit.")
        return -1

    @max_position_embeddings.setter
    def max_position_embeddings(self, value):
        # Message copied from Transformer-XL documentation
        raise NotImplementedError(
            f"The model {self.model_type} is one of the few models that has no sequence length limit."
        )
693
0
import unittest

import numpy as np

from transformers.testing_utils import require_flax, require_tf, require_torch
from transformers.utils import (
    expand_dims,
    flatten_dict,
    is_flax_available,
    is_tf_available,
    is_torch_available,
    reshape,
    squeeze,
    transpose,
)


if is_flax_available():
    import jax.numpy as jnp

if is_tf_available():
    import tensorflow as tf

if is_torch_available():
    import torch


class _a(unittest.TestCase):
    """Tests for the framework-agnostic tensor helpers in `transformers.utils`.

    NOTE: the original named every method `_snake_case` (so all but the last
    were shadowed and never discovered) and referenced undefined locals
    (`UpperCamelCase__`, `x`); distinct test names and real locals restored.
    """

    def test_flatten_dict(self):
        # Nested dicts flatten into dotted keys.
        input_dict = {
            "task_specific_params": {
                "summarization": {"length_penalty": 1.0, "max_length": 128, "min_length": 12, "num_beams": 4},
                "summarization_cnn": {"length_penalty": 2.0, "max_length": 142, "min_length": 56, "num_beams": 4},
                "summarization_xsum": {"length_penalty": 1.0, "max_length": 62, "min_length": 11, "num_beams": 6},
            }
        }
        expected_dict = {
            "task_specific_params.summarization.length_penalty": 1.0,
            "task_specific_params.summarization.max_length": 128,
            "task_specific_params.summarization.min_length": 12,
            "task_specific_params.summarization.num_beams": 4,
            "task_specific_params.summarization_cnn.length_penalty": 2.0,
            "task_specific_params.summarization_cnn.max_length": 142,
            "task_specific_params.summarization_cnn.min_length": 56,
            "task_specific_params.summarization_cnn.num_beams": 4,
            "task_specific_params.summarization_xsum.length_penalty": 1.0,
            "task_specific_params.summarization_xsum.max_length": 62,
            "task_specific_params.summarization_xsum.min_length": 11,
            "task_specific_params.summarization_xsum.num_beams": 6,
        }
        self.assertEqual(flatten_dict(input_dict), expected_dict)

    def test_transpose_numpy(self):
        x = np.random.randn(3, 4)
        self.assertTrue(np.allclose(transpose(x), x.transpose()))

        x = np.random.randn(3, 4, 5)
        self.assertTrue(np.allclose(transpose(x, axes=(1, 2, 0)), x.transpose((1, 2, 0))))

    @require_torch
    def test_transpose_torch(self):
        x = np.random.randn(3, 4)
        t = torch.tensor(x)
        self.assertTrue(np.allclose(transpose(x), transpose(t).numpy()))

        x = np.random.randn(3, 4, 5)
        t = torch.tensor(x)
        self.assertTrue(np.allclose(transpose(x, axes=(1, 2, 0)), transpose(t, axes=(1, 2, 0)).numpy()))

    @require_tf
    def test_transpose_tf(self):
        x = np.random.randn(3, 4)
        t = tf.constant(x)
        self.assertTrue(np.allclose(transpose(x), transpose(t).numpy()))

        x = np.random.randn(3, 4, 5)
        t = tf.constant(x)
        self.assertTrue(np.allclose(transpose(x, axes=(1, 2, 0)), transpose(t, axes=(1, 2, 0)).numpy()))

    @require_flax
    def test_transpose_flax(self):
        x = np.random.randn(3, 4)
        t = jnp.array(x)
        self.assertTrue(np.allclose(transpose(x), np.asarray(transpose(t))))

        x = np.random.randn(3, 4, 5)
        t = jnp.array(x)
        self.assertTrue(np.allclose(transpose(x, axes=(1, 2, 0)), np.asarray(transpose(t, axes=(1, 2, 0)))))

    def test_reshape_numpy(self):
        x = np.random.randn(3, 4)
        self.assertTrue(np.allclose(reshape(x, (4, 3)), np.reshape(x, (4, 3))))

        x = np.random.randn(3, 4, 5)
        self.assertTrue(np.allclose(reshape(x, (12, 5)), np.reshape(x, (12, 5))))

    @require_torch
    def test_reshape_torch(self):
        x = np.random.randn(3, 4)
        t = torch.tensor(x)
        self.assertTrue(np.allclose(reshape(x, (4, 3)), reshape(t, (4, 3)).numpy()))

        x = np.random.randn(3, 4, 5)
        t = torch.tensor(x)
        self.assertTrue(np.allclose(reshape(x, (12, 5)), reshape(t, (12, 5)).numpy()))

    @require_tf
    def test_reshape_tf(self):
        x = np.random.randn(3, 4)
        t = tf.constant(x)
        self.assertTrue(np.allclose(reshape(x, (4, 3)), reshape(t, (4, 3)).numpy()))

        x = np.random.randn(3, 4, 5)
        t = tf.constant(x)
        self.assertTrue(np.allclose(reshape(x, (12, 5)), reshape(t, (12, 5)).numpy()))

    @require_flax
    def test_reshape_flax(self):
        x = np.random.randn(3, 4)
        t = jnp.array(x)
        self.assertTrue(np.allclose(reshape(x, (4, 3)), np.asarray(reshape(t, (4, 3)))))

        x = np.random.randn(3, 4, 5)
        t = jnp.array(x)
        self.assertTrue(np.allclose(reshape(x, (12, 5)), np.asarray(reshape(t, (12, 5)))))

    def test_squeeze_numpy(self):
        x = np.random.randn(1, 3, 4)
        self.assertTrue(np.allclose(squeeze(x), np.squeeze(x)))

        x = np.random.randn(1, 4, 1, 5)
        self.assertTrue(np.allclose(squeeze(x, axis=2), np.squeeze(x, axis=2)))

    @require_torch
    def test_squeeze_torch(self):
        x = np.random.randn(1, 3, 4)
        t = torch.tensor(x)
        self.assertTrue(np.allclose(squeeze(x), squeeze(t).numpy()))

        x = np.random.randn(1, 4, 1, 5)
        t = torch.tensor(x)
        self.assertTrue(np.allclose(squeeze(x, axis=2), squeeze(t, axis=2).numpy()))

    @require_tf
    def test_squeeze_tf(self):
        x = np.random.randn(1, 3, 4)
        t = tf.constant(x)
        self.assertTrue(np.allclose(squeeze(x), squeeze(t).numpy()))

        x = np.random.randn(1, 4, 1, 5)
        t = tf.constant(x)
        self.assertTrue(np.allclose(squeeze(x, axis=2), squeeze(t, axis=2).numpy()))

    @require_flax
    def test_squeeze_flax(self):
        x = np.random.randn(1, 3, 4)
        t = jnp.array(x)
        self.assertTrue(np.allclose(squeeze(x), np.asarray(squeeze(t))))

        x = np.random.randn(1, 4, 1, 5)
        t = jnp.array(x)
        self.assertTrue(np.allclose(squeeze(x, axis=2), np.asarray(squeeze(t, axis=2))))

    def test_expand_dims_numpy(self):
        x = np.random.randn(3, 4)
        self.assertTrue(np.allclose(expand_dims(x, axis=1), np.expand_dims(x, axis=1)))

    @require_torch
    def test_expand_dims_torch(self):
        x = np.random.randn(3, 4)
        t = torch.tensor(x)
        self.assertTrue(np.allclose(expand_dims(x, axis=1), expand_dims(t, axis=1).numpy()))

    @require_tf
    def test_expand_dims_tf(self):
        x = np.random.randn(3, 4)
        t = tf.constant(x)
        self.assertTrue(np.allclose(expand_dims(x, axis=1), expand_dims(t, axis=1).numpy()))

    @require_flax
    def test_expand_dims_flax(self):
        x = np.random.randn(3, 4)
        t = jnp.array(x)
        self.assertTrue(np.allclose(expand_dims(x, axis=1), np.asarray(expand_dims(t, axis=1))))
712
import torch from diffusers import DiffusionPipeline class _a ( snake_case_ ): def __init__( self , lowercase_ , lowercase_ ) -> int: super().__init__() self.register_modules(unet=lowercase_ , scheduler=lowercase_ ) def __call__( self ) -> List[Any]: lowerCAmelCase : Union[str, Any] = torch.randn( (1, self.unet.config.in_channels, self.unet.config.sample_size, self.unet.config.sample_size) , ) lowerCAmelCase : Union[str, Any] = 1 lowerCAmelCase : Dict = self.unet(lowercase_ , lowercase_ ).sample lowerCAmelCase : str = self.scheduler.step(lowercase_ , lowercase_ , lowercase_ ).prev_sample lowerCAmelCase : Dict = scheduler_output - scheduler_output + torch.ones_like(lowercase_ ) return result
693
0
import inspect import unittest import numpy as np from tests.test_modeling_common import floats_tensor from transformers import MaskaFormerConfig, is_torch_available, is_vision_available from transformers.testing_utils import require_torch, require_torch_multi_gpu, require_vision, slow, torch_device from transformers.utils import cached_property from ...test_configuration_common import ConfigTester from ...test_modeling_common import ModelTesterMixin from ...test_pipeline_mixin import PipelineTesterMixin if is_torch_available(): import torch from transformers import MaskaFormerForUniversalSegmentation, MaskaFormerModel if is_vision_available(): from transformers import MaskaFormerImageProcessor if is_vision_available(): from PIL import Image class _a : def __init__( self , lowercase_ , lowercase_=2 , lowercase_=True , lowercase_=False , lowercase_=10 , lowercase_=3 , lowercase_=32 * 8 , lowercase_=32 * 8 , lowercase_=4 , lowercase_=64 , ) -> List[Any]: lowerCAmelCase : Tuple = parent lowerCAmelCase : int = batch_size lowerCAmelCase : Optional[Any] = is_training lowerCAmelCase : str = use_auxiliary_loss lowerCAmelCase : str = num_queries lowerCAmelCase : List[Any] = num_channels lowerCAmelCase : Tuple = min_size lowerCAmelCase : int = max_size lowerCAmelCase : List[Any] = num_labels lowerCAmelCase : List[str] = hidden_dim lowerCAmelCase : Tuple = hidden_dim def _snake_case ( self ) -> int: lowerCAmelCase : List[Any] = floats_tensor([self.batch_size, self.num_channels, self.min_size, self.max_size] ).to( _SCREAMING_SNAKE_CASE ) lowerCAmelCase : List[str] = torch.ones([self.batch_size, self.min_size, self.max_size] , device=_SCREAMING_SNAKE_CASE ) lowerCAmelCase : int = ( torch.rand([self.batch_size, self.num_labels, self.min_size, self.max_size] , device=_SCREAMING_SNAKE_CASE ) > 0.5 ).float() lowerCAmelCase : Dict = (torch.rand((self.batch_size, self.num_labels) , device=_SCREAMING_SNAKE_CASE ) > 0.5).long() lowerCAmelCase : Optional[Any] = self.get_config() return 
config, pixel_values, pixel_mask, mask_labels, class_labels def _snake_case ( self ) -> Optional[Any]: lowerCAmelCase : Optional[Any] = MaskaFormerConfig( hidden_size=self.hidden_dim , ) lowerCAmelCase : List[str] = self.num_queries lowerCAmelCase : Optional[Any] = self.num_labels lowerCAmelCase : int = [1, 1, 1, 1] lowerCAmelCase : List[Any] = self.num_channels lowerCAmelCase : Any = 64 lowerCAmelCase : int = 128 lowerCAmelCase : Union[str, Any] = self.hidden_dim lowerCAmelCase : Dict = self.hidden_dim lowerCAmelCase : int = self.hidden_dim return config def _snake_case ( self ) -> str: lowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase : Optional[int] = self.prepare_config_and_inputs() lowerCAmelCase : List[str] = {"""pixel_values""": pixel_values, """pixel_mask""": pixel_mask} return config, inputs_dict def _snake_case ( self , lowercase_ , lowercase_ ) -> Union[str, Any]: lowerCAmelCase : List[str] = output.encoder_hidden_states lowerCAmelCase : List[str] = output.pixel_decoder_hidden_states lowerCAmelCase : List[str] = output.transformer_decoder_hidden_states self.parent.assertTrue(len(_SCREAMING_SNAKE_CASE ) , len(config.backbone_config.depths ) ) self.parent.assertTrue(len(_SCREAMING_SNAKE_CASE ) , len(config.backbone_config.depths ) ) self.parent.assertTrue(len(_SCREAMING_SNAKE_CASE ) , config.decoder_layers ) def _snake_case ( self , lowercase_ , lowercase_ , lowercase_ , lowercase_=False ) -> Tuple: with torch.no_grad(): lowerCAmelCase : int = MaskaFormerModel(config=_SCREAMING_SNAKE_CASE ) model.to(_SCREAMING_SNAKE_CASE ) model.eval() lowerCAmelCase : int = model(pixel_values=_SCREAMING_SNAKE_CASE , pixel_mask=_SCREAMING_SNAKE_CASE ) lowerCAmelCase : Optional[Any] = model(_SCREAMING_SNAKE_CASE , output_hidden_states=_SCREAMING_SNAKE_CASE ) self.parent.assertEqual( output.transformer_decoder_last_hidden_state.shape , (self.batch_size, self.num_queries, self.hidden_dim) , ) # let's ensure the other two hidden state exists 
self.parent.assertTrue(output.pixel_decoder_last_hidden_state is not None ) self.parent.assertTrue(output.encoder_last_hidden_state is not None ) if output_hidden_states: self.check_output_hidden_state(_SCREAMING_SNAKE_CASE , _SCREAMING_SNAKE_CASE ) def _snake_case ( self , lowercase_ , lowercase_ , lowercase_ , lowercase_ , lowercase_ ) -> List[Any]: lowerCAmelCase : Any = MaskaFormerForUniversalSegmentation(config=_SCREAMING_SNAKE_CASE ) model.to(_SCREAMING_SNAKE_CASE ) model.eval() def comm_check_on_output(lowercase_ ): # let's still check that all the required stuff is there self.parent.assertTrue(result.transformer_decoder_last_hidden_state is not None ) self.parent.assertTrue(result.pixel_decoder_last_hidden_state is not None ) self.parent.assertTrue(result.encoder_last_hidden_state is not None ) # okay, now we need to check the logits shape # due to the encoder compression, masks have a //4 spatial size self.parent.assertEqual( result.masks_queries_logits.shape , (self.batch_size, self.num_queries, self.min_size // 4, self.max_size // 4) , ) # + 1 for null class self.parent.assertEqual( result.class_queries_logits.shape , (self.batch_size, self.num_queries, self.num_labels + 1) ) with torch.no_grad(): lowerCAmelCase : Tuple = model(pixel_values=_SCREAMING_SNAKE_CASE , pixel_mask=_SCREAMING_SNAKE_CASE ) lowerCAmelCase : Optional[int] = model(_SCREAMING_SNAKE_CASE ) comm_check_on_output(_SCREAMING_SNAKE_CASE ) lowerCAmelCase : List[Any] = model( pixel_values=_SCREAMING_SNAKE_CASE , pixel_mask=_SCREAMING_SNAKE_CASE , mask_labels=_SCREAMING_SNAKE_CASE , class_labels=_SCREAMING_SNAKE_CASE ) comm_check_on_output(_SCREAMING_SNAKE_CASE ) self.parent.assertTrue(result.loss is not None ) self.parent.assertEqual(result.loss.shape , torch.Size([1] ) ) @require_torch class _a ( snake_case_ , snake_case_ , unittest.TestCase ): _UpperCamelCase: str = (MaskaFormerModel, MaskaFormerForUniversalSegmentation) if is_torch_available() else () _UpperCamelCase: Optional[Any] = 
{"feature-extraction": MaskaFormerModel} if is_torch_available() else {} _UpperCamelCase: Tuple = False _UpperCamelCase: List[str] = False _UpperCamelCase: Optional[int] = False _UpperCamelCase: Optional[Any] = False def _snake_case ( self ) -> List[str]: lowerCAmelCase : Optional[int] = MaskaFormerModelTester(self ) lowerCAmelCase : List[str] = ConfigTester(self , config_class=_SCREAMING_SNAKE_CASE , has_text_modality=_SCREAMING_SNAKE_CASE ) def _snake_case ( self ) -> Tuple: self.config_tester.run_common_tests() def _snake_case ( self ) -> List[Any]: lowerCAmelCase , lowerCAmelCase : Optional[Any] = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.create_and_check_maskaformer_model(_SCREAMING_SNAKE_CASE , **_SCREAMING_SNAKE_CASE , output_hidden_states=_SCREAMING_SNAKE_CASE ) def _snake_case ( self ) -> Tuple: lowerCAmelCase : Union[str, Any] = self.model_tester.prepare_config_and_inputs() self.model_tester.create_and_check_maskaformer_instance_segmentation_head_model(*_SCREAMING_SNAKE_CASE ) @unittest.skip(reason="""Mask2Former does not use inputs_embeds""" ) def _snake_case ( self ) -> Any: pass @unittest.skip(reason="""Mask2Former does not have a get_input_embeddings method""" ) def _snake_case ( self ) -> Tuple: pass @unittest.skip(reason="""Mask2Former is not a generative model""" ) def _snake_case ( self ) -> Dict: pass @unittest.skip(reason="""Mask2Former does not use token embeddings""" ) def _snake_case ( self ) -> Tuple: pass @require_torch_multi_gpu @unittest.skip( reason="""Mask2Former has some layers using `add_module` which doesn't work well with `nn.DataParallel`""" ) def _snake_case ( self ) -> str: pass @unittest.skip("""Will be fixed soon by reducing the size of the model used for common tests.""" ) def _snake_case ( self ) -> Optional[int]: pass def _snake_case ( self ) -> Tuple: lowerCAmelCase , lowerCAmelCase : Union[str, Any] = self.model_tester.prepare_config_and_inputs_for_common() for model_class in 
self.all_model_classes: lowerCAmelCase : Union[str, Any] = model_class(_SCREAMING_SNAKE_CASE ) lowerCAmelCase : Optional[int] = inspect.signature(model.forward ) # signature.parameters is an OrderedDict => so arg_names order is deterministic lowerCAmelCase : List[Any] = [*signature.parameters.keys()] lowerCAmelCase : Optional[int] = ["""pixel_values"""] self.assertListEqual(arg_names[:1] , _SCREAMING_SNAKE_CASE ) @slow def _snake_case ( self ) -> str: for model_name in ["facebook/mask2former-swin-small-coco-instance"]: lowerCAmelCase : Optional[int] = MaskaFormerModel.from_pretrained(_SCREAMING_SNAKE_CASE ) self.assertIsNotNone(_SCREAMING_SNAKE_CASE ) def _snake_case ( self ) -> int: lowerCAmelCase : Tuple = (self.model_tester.min_size,) * 2 lowerCAmelCase : Optional[Any] = { """pixel_values""": torch.randn((2, 3, *size) , device=_SCREAMING_SNAKE_CASE ), """mask_labels""": torch.randn((2, 10, *size) , device=_SCREAMING_SNAKE_CASE ), """class_labels""": torch.zeros(2 , 10 , device=_SCREAMING_SNAKE_CASE ).long(), } lowerCAmelCase : Optional[int] = self.model_tester.get_config() lowerCAmelCase : List[str] = MaskaFormerForUniversalSegmentation(_SCREAMING_SNAKE_CASE ).to(_SCREAMING_SNAKE_CASE ) lowerCAmelCase : Dict = model(**_SCREAMING_SNAKE_CASE ) self.assertTrue(outputs.loss is not None ) def _snake_case ( self ) -> Dict: lowerCAmelCase , lowerCAmelCase : Any = self.model_tester.prepare_config_and_inputs_for_common() self.model_tester.create_and_check_maskaformer_model(_SCREAMING_SNAKE_CASE , **_SCREAMING_SNAKE_CASE , output_hidden_states=_SCREAMING_SNAKE_CASE ) def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase , lowerCAmelCase : int = self.model_tester.prepare_config_and_inputs_for_common() for model_class in self.all_model_classes: lowerCAmelCase : Tuple = model_class(_SCREAMING_SNAKE_CASE ).to(_SCREAMING_SNAKE_CASE ) lowerCAmelCase : int = model(**_SCREAMING_SNAKE_CASE , output_attentions=_SCREAMING_SNAKE_CASE ) self.assertTrue(outputs.attentions is not 
None ) def _snake_case ( self ) -> Dict: if not self.model_tester.is_training: return lowerCAmelCase : Optional[Any] = self.all_model_classes[1] lowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase : List[Any] = self.model_tester.prepare_config_and_inputs() lowerCAmelCase : Dict = model_class(_SCREAMING_SNAKE_CASE ) model.to(_SCREAMING_SNAKE_CASE ) model.train() lowerCAmelCase : Optional[int] = model(_SCREAMING_SNAKE_CASE , mask_labels=_SCREAMING_SNAKE_CASE , class_labels=_SCREAMING_SNAKE_CASE ).loss loss.backward() def _snake_case ( self ) -> str: lowerCAmelCase : Any = self.all_model_classes[1] lowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase , lowerCAmelCase : List[str] = self.model_tester.prepare_config_and_inputs() lowerCAmelCase : Union[str, Any] = True lowerCAmelCase : List[Any] = True lowerCAmelCase : List[Any] = model_class(_SCREAMING_SNAKE_CASE ).to(_SCREAMING_SNAKE_CASE ) model.train() lowerCAmelCase : Optional[int] = model(_SCREAMING_SNAKE_CASE , mask_labels=_SCREAMING_SNAKE_CASE , class_labels=_SCREAMING_SNAKE_CASE ) lowerCAmelCase : Union[str, Any] = outputs.encoder_hidden_states[0] encoder_hidden_states.retain_grad() lowerCAmelCase : Union[str, Any] = outputs.pixel_decoder_hidden_states[0] pixel_decoder_hidden_states.retain_grad() lowerCAmelCase : Union[str, Any] = outputs.transformer_decoder_hidden_states[0] transformer_decoder_hidden_states.retain_grad() lowerCAmelCase : List[str] = outputs.attentions[0] attentions.retain_grad() outputs.loss.backward(retain_graph=_SCREAMING_SNAKE_CASE ) self.assertIsNotNone(encoder_hidden_states.grad ) self.assertIsNotNone(pixel_decoder_hidden_states.grad ) self.assertIsNotNone(transformer_decoder_hidden_states.grad ) self.assertIsNotNone(attentions.grad ) lowerCAmelCase : str =1e-4 def _UpperCAmelCase ( ): '''simple docstring''' lowerCAmelCase : List[Any] = Image.open("""./tests/fixtures/tests_samples/COCO/000000039769.png""" ) return image @require_vision @slow class 
_a ( unittest.TestCase ): @cached_property def _snake_case ( self ) -> Any: return "facebook/mask2former-swin-small-coco-instance" @cached_property def _snake_case ( self ) -> Any: return MaskaFormerImageProcessor.from_pretrained(self.model_checkpoints ) if is_vision_available() else None def _snake_case ( self ) -> Optional[Any]: lowerCAmelCase : Union[str, Any] = MaskaFormerModel.from_pretrained(self.model_checkpoints ).to(_SCREAMING_SNAKE_CASE ) lowerCAmelCase : int = self.default_image_processor lowerCAmelCase : Optional[Any] = prepare_img() lowerCAmelCase : List[str] = image_processor(_SCREAMING_SNAKE_CASE , return_tensors="""pt""" ).to(_SCREAMING_SNAKE_CASE ) lowerCAmelCase : Union[str, Any] = inputs["""pixel_values"""].shape # check size is divisible by 32 self.assertTrue((inputs_shape[-1] % 32) == 0 and (inputs_shape[-2] % 32) == 0 ) # check size self.assertEqual(_SCREAMING_SNAKE_CASE , (1, 3, 384, 384) ) with torch.no_grad(): lowerCAmelCase : Optional[Any] = model(**_SCREAMING_SNAKE_CASE ) lowerCAmelCase : List[str] = torch.tensor( [[-0.2_7_9_0, -1.0_7_1_7, -1.1_6_6_8], [-0.5_1_2_8, -0.3_1_2_8, -0.4_9_8_7], [-0.5_8_3_2, 0.1_9_7_1, -0.0_1_9_7]] ).to(_SCREAMING_SNAKE_CASE ) self.assertTrue( torch.allclose( outputs.encoder_last_hidden_state[0, 0, :3, :3] , _SCREAMING_SNAKE_CASE , atol=_SCREAMING_SNAKE_CASE ) ) lowerCAmelCase : Any = torch.tensor( [[0.8_9_7_3, 1.1_8_4_7, 1.1_7_7_6], [1.1_9_3_4, 1.5_0_4_0, 1.5_1_2_8], [1.1_1_5_3, 1.4_4_8_6, 1.4_9_5_1]] ).to(_SCREAMING_SNAKE_CASE ) self.assertTrue( torch.allclose( outputs.pixel_decoder_last_hidden_state[0, 0, :3, :3] , _SCREAMING_SNAKE_CASE , atol=_SCREAMING_SNAKE_CASE ) ) lowerCAmelCase : Optional[int] = torch.tensor( [[2.1_1_5_2, 1.7_0_0_0, -0.8_6_0_3], [1.5_8_0_8, 1.8_0_0_4, -0.9_3_5_3], [1.6_0_4_3, 1.7_4_9_5, -0.5_9_9_9]] ).to(_SCREAMING_SNAKE_CASE ) self.assertTrue( torch.allclose( outputs.transformer_decoder_last_hidden_state[0, :3, :3] , _SCREAMING_SNAKE_CASE , atol=_SCREAMING_SNAKE_CASE ) ) def 
_snake_case ( self ) -> Any: lowerCAmelCase : Optional[Any] = MaskaFormerForUniversalSegmentation.from_pretrained(self.model_checkpoints ).to(_SCREAMING_SNAKE_CASE ).eval() lowerCAmelCase : Optional[int] = self.default_image_processor lowerCAmelCase : Optional[Any] = prepare_img() lowerCAmelCase : Union[str, Any] = image_processor(_SCREAMING_SNAKE_CASE , return_tensors="""pt""" ).to(_SCREAMING_SNAKE_CASE ) lowerCAmelCase : Union[str, Any] = inputs["""pixel_values"""].shape # check size is divisible by 32 self.assertTrue((inputs_shape[-1] % 32) == 0 and (inputs_shape[-2] % 32) == 0 ) # check size self.assertEqual(_SCREAMING_SNAKE_CASE , (1, 3, 384, 384) ) with torch.no_grad(): lowerCAmelCase : Optional[Any] = model(**_SCREAMING_SNAKE_CASE ) # masks_queries_logits lowerCAmelCase : int = outputs.masks_queries_logits self.assertEqual( masks_queries_logits.shape , (1, model.config.num_queries, inputs_shape[-2] // 4, inputs_shape[-1] // 4) ) lowerCAmelCase : int = [ [-8.7_8_3_9, -9.0_0_5_6, -8.8_1_2_1], [-7.4_1_0_4, -7.0_3_1_3, -6.5_4_0_1], [-6.6_1_0_5, -6.3_4_2_7, -6.4_6_7_5], ] lowerCAmelCase : str = torch.tensor(_SCREAMING_SNAKE_CASE ).to(_SCREAMING_SNAKE_CASE ) self.assertTrue(torch.allclose(masks_queries_logits[0, 0, :3, :3] , _SCREAMING_SNAKE_CASE , atol=_SCREAMING_SNAKE_CASE ) ) # class_queries_logits lowerCAmelCase : Dict = outputs.class_queries_logits self.assertEqual(class_queries_logits.shape , (1, model.config.num_queries, model.config.num_labels + 1) ) lowerCAmelCase : Any = torch.tensor( [ [1.8_3_2_4, -8.0_8_3_5, -4.1_9_2_2], [0.8_4_5_0, -9.0_0_5_0, -3.6_0_5_3], [0.3_0_4_5, -7.7_2_9_3, -3.0_2_7_5], ] ).to(_SCREAMING_SNAKE_CASE ) self.assertTrue(torch.allclose(outputs.class_queries_logits[0, :3, :3] , _SCREAMING_SNAKE_CASE , atol=_SCREAMING_SNAKE_CASE ) ) def _snake_case ( self ) -> List[str]: lowerCAmelCase : List[Any] = MaskaFormerForUniversalSegmentation.from_pretrained(self.model_checkpoints ).to(_SCREAMING_SNAKE_CASE ).eval() lowerCAmelCase : Union[str, 
Any] = self.default_image_processor lowerCAmelCase : str = image_processor( [np.zeros((3, 800, 1333) ), np.zeros((3, 800, 1333) )] , segmentation_maps=[np.zeros((384, 384) ).astype(np.floataa ), np.zeros((384, 384) ).astype(np.floataa )] , return_tensors="""pt""" , ) lowerCAmelCase : Any = inputs["""pixel_values"""].to(_SCREAMING_SNAKE_CASE ) lowerCAmelCase : Union[str, Any] = [el.to(_SCREAMING_SNAKE_CASE ) for el in inputs["""mask_labels"""]] lowerCAmelCase : str = [el.to(_SCREAMING_SNAKE_CASE ) for el in inputs["""class_labels"""]] with torch.no_grad(): lowerCAmelCase : List[Any] = model(**_SCREAMING_SNAKE_CASE ) self.assertTrue(outputs.loss is not None )
713
import pytest import requests from datasets.utils.file_utils import http_head from .utils import OfflineSimulationMode, RequestWouldHangIndefinitelyError, offline @pytest.mark.integration def _UpperCAmelCase ( ): '''simple docstring''' with offline(OfflineSimulationMode.CONNECTION_TIMES_OUT ): with pytest.raises(SCREAMING_SNAKE_CASE__ ): requests.request("""GET""" ,"""https://huggingface.co""" ) with pytest.raises(requests.exceptions.ConnectTimeout ): requests.request("""GET""" ,"""https://huggingface.co""" ,timeout=1.0 ) @pytest.mark.integration def _UpperCAmelCase ( ): '''simple docstring''' with offline(OfflineSimulationMode.CONNECTION_FAILS ): with pytest.raises(requests.exceptions.ConnectionError ): requests.request("""GET""" ,"""https://huggingface.co""" ) def _UpperCAmelCase ( ): '''simple docstring''' with offline(OfflineSimulationMode.HF_DATASETS_OFFLINE_SET_TO_1 ): with pytest.raises(SCREAMING_SNAKE_CASE__ ): http_head("""https://huggingface.co""" )
693
0
import os import pytest from datasets import ( get_dataset_config_info, get_dataset_config_names, get_dataset_infos, get_dataset_split_names, inspect_dataset, inspect_metric, ) lowerCAmelCase : str =pytest.mark.integration @pytest.mark.parametrize("""path""" ,["""paws""", """csv"""] ) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' inspect_dataset(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ) lowerCAmelCase : List[str] = path + """.py""" assert script_name in os.listdir(SCREAMING_SNAKE_CASE__ ) assert "__pycache__" not in os.listdir(SCREAMING_SNAKE_CASE__ ) @pytest.mark.filterwarnings("""ignore:inspect_metric is deprecated:FutureWarning""" ) @pytest.mark.filterwarnings("""ignore:metric_module_factory is deprecated:FutureWarning""" ) @pytest.mark.parametrize("""path""" ,["""accuracy"""] ) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' inspect_metric(SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ) lowerCAmelCase : str = path + """.py""" assert script_name in os.listdir(SCREAMING_SNAKE_CASE__ ) assert "__pycache__" not in os.listdir(SCREAMING_SNAKE_CASE__ ) @pytest.mark.parametrize( """path, config_name, expected_splits""" ,[ ("""squad""", """plain_text""", ["""train""", """validation"""]), ("""dalle-mini/wit""", """dalle-mini--wit""", ["""train"""]), ("""paws""", """labeled_final""", ["""train""", """test""", """validation"""]), ] ,) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Optional[Any] = get_dataset_config_info(SCREAMING_SNAKE_CASE__ ,config_name=SCREAMING_SNAKE_CASE__ ) assert info.config_name == config_name assert list(info.splits.keys() ) == expected_splits @pytest.mark.parametrize( """path, config_name, expected_exception""" ,[ ("""paws""", None, ValueError), ] ,) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple 
docstring''' with pytest.raises(SCREAMING_SNAKE_CASE__ ): get_dataset_config_info(SCREAMING_SNAKE_CASE__ ,config_name=SCREAMING_SNAKE_CASE__ ) @pytest.mark.parametrize( """path, expected""" ,[ ("""squad""", """plain_text"""), ("""acronym_identification""", """default"""), ("""lhoestq/squad""", """plain_text"""), ("""lhoestq/test""", """default"""), ("""lhoestq/demo1""", """lhoestq--demo1"""), ("""dalle-mini/wit""", """dalle-mini--wit"""), ] ,) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Any = get_dataset_config_names(SCREAMING_SNAKE_CASE__ ) assert expected in config_names @pytest.mark.parametrize( """path, expected_configs, expected_splits_in_first_config""" ,[ ("""squad""", ["""plain_text"""], ["""train""", """validation"""]), ("""dalle-mini/wit""", ["""dalle-mini--wit"""], ["""train"""]), ("""paws""", ["""labeled_final""", """labeled_swap""", """unlabeled_final"""], ["""train""", """test""", """validation"""]), ] ,) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : List[Any] = get_dataset_infos(SCREAMING_SNAKE_CASE__ ) assert list(infos.keys() ) == expected_configs lowerCAmelCase : Dict = expected_configs[0] assert expected_config in infos lowerCAmelCase : List[str] = infos[expected_config] assert info.config_name == expected_config assert list(info.splits.keys() ) == expected_splits_in_first_config @pytest.mark.parametrize( """path, expected_config, expected_splits""" ,[ ("""squad""", """plain_text""", ["""train""", """validation"""]), ("""dalle-mini/wit""", """dalle-mini--wit""", ["""train"""]), ("""paws""", """labeled_final""", ["""train""", """test""", """validation"""]), ] ,) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Optional[int] = get_dataset_infos(SCREAMING_SNAKE_CASE__ ) assert expected_config in infos 
lowerCAmelCase : Optional[int] = infos[expected_config] assert info.config_name == expected_config assert list(info.splits.keys() ) == expected_splits @pytest.mark.parametrize( """path, config_name, expected_exception""" ,[ ("""paws""", None, ValueError), ] ,) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' with pytest.raises(SCREAMING_SNAKE_CASE__ ): get_dataset_split_names(SCREAMING_SNAKE_CASE__ ,config_name=SCREAMING_SNAKE_CASE__ )
714
import json import pathlib import unittest import numpy as np from transformers.testing_utils import require_torch, require_vision, slow from transformers.utils import is_torch_available, is_vision_available from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs if is_torch_available(): import torch if is_vision_available(): from PIL import Image from transformers import DetrImageProcessor class _a ( unittest.TestCase ): def __init__( self , lowercase_ , lowercase_=7 , lowercase_=3 , lowercase_=30 , lowercase_=400 , lowercase_=True , lowercase_=None , lowercase_=True , lowercase_=1 / 255 , lowercase_=True , lowercase_=[0.5, 0.5, 0.5] , lowercase_=[0.5, 0.5, 0.5] , lowercase_=True , ) -> Tuple: # by setting size["longest_edge"] > max_resolution we're effectively not testing this :p lowerCAmelCase : Optional[Any] = size if size is not None else {"""shortest_edge""": 18, """longest_edge""": 1333} lowerCAmelCase : Optional[int] = parent lowerCAmelCase : Optional[int] = batch_size lowerCAmelCase : Dict = num_channels lowerCAmelCase : str = min_resolution lowerCAmelCase : Optional[Any] = max_resolution lowerCAmelCase : Optional[int] = do_resize lowerCAmelCase : List[str] = size lowerCAmelCase : Dict = do_rescale lowerCAmelCase : Union[str, Any] = rescale_factor lowerCAmelCase : int = do_normalize lowerCAmelCase : Union[str, Any] = image_mean lowerCAmelCase : Dict = image_std lowerCAmelCase : Optional[int] = do_pad def _snake_case ( self ) -> Any: return { "do_resize": self.do_resize, "size": self.size, "do_rescale": self.do_rescale, "rescale_factor": self.rescale_factor, "do_normalize": self.do_normalize, "image_mean": self.image_mean, "image_std": self.image_std, "do_pad": self.do_pad, } def _snake_case ( self , lowercase_ , lowercase_=False ) -> List[Any]: if not batched: lowerCAmelCase : Tuple = image_inputs[0] if isinstance(lowercase_ , Image.Image ): lowerCAmelCase , lowerCAmelCase : Dict = image.size else: lowerCAmelCase , 
lowerCAmelCase : Tuple = image.shape[1], image.shape[2] if w < h: lowerCAmelCase : Union[str, Any] = int(self.size["""shortest_edge"""] * h / w ) lowerCAmelCase : Optional[Any] = self.size["""shortest_edge"""] elif w > h: lowerCAmelCase : List[Any] = self.size["""shortest_edge"""] lowerCAmelCase : List[Any] = int(self.size["""shortest_edge"""] * w / h ) else: lowerCAmelCase : Optional[int] = self.size["""shortest_edge"""] lowerCAmelCase : List[str] = self.size["""shortest_edge"""] else: lowerCAmelCase : Optional[int] = [] for image in image_inputs: lowerCAmelCase , lowerCAmelCase : int = self.get_expected_values([image] ) expected_values.append((expected_height, expected_width) ) lowerCAmelCase : Any = max(lowercase_ , key=lambda lowercase_ : item[0] )[0] lowerCAmelCase : Union[str, Any] = max(lowercase_ , key=lambda lowercase_ : item[1] )[1] return expected_height, expected_width @require_torch @require_vision class _a ( snake_case_ , unittest.TestCase ): _UpperCamelCase: Optional[Any] = DetrImageProcessor if is_vision_available() else None def _snake_case ( self ) -> Optional[int]: lowerCAmelCase : List[str] = DetrImageProcessingTester(self ) @property def _snake_case ( self ) -> str: return self.image_processor_tester.prepare_image_processor_dict() def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase : List[str] = self.image_processing_class(**self.image_processor_dict ) self.assertTrue(hasattr(lowercase_ , """image_mean""" ) ) self.assertTrue(hasattr(lowercase_ , """image_std""" ) ) self.assertTrue(hasattr(lowercase_ , """do_normalize""" ) ) self.assertTrue(hasattr(lowercase_ , """do_rescale""" ) ) self.assertTrue(hasattr(lowercase_ , """rescale_factor""" ) ) self.assertTrue(hasattr(lowercase_ , """do_resize""" ) ) self.assertTrue(hasattr(lowercase_ , """size""" ) ) self.assertTrue(hasattr(lowercase_ , """do_pad""" ) ) def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase : Union[str, Any] = 
self.image_processing_class.from_dict(self.image_processor_dict ) self.assertEqual(image_processor.size , {"""shortest_edge""": 18, """longest_edge""": 1333} ) self.assertEqual(image_processor.do_pad , lowercase_ ) lowerCAmelCase : Optional[Any] = self.image_processing_class.from_dict( self.image_processor_dict , size=42 , max_size=84 , pad_and_return_pixel_mask=lowercase_ ) self.assertEqual(image_processor.size , {"""shortest_edge""": 42, """longest_edge""": 84} ) self.assertEqual(image_processor.do_pad , lowercase_ ) def _snake_case ( self ) -> List[Any]: pass def _snake_case ( self ) -> List[Any]: # Initialize image_processing lowerCAmelCase : str = self.image_processing_class(**self.image_processor_dict ) # create random PIL images lowerCAmelCase : str = prepare_image_inputs(self.image_processor_tester , equal_resolution=lowercase_ ) for image in image_inputs: self.assertIsInstance(lowercase_ , Image.Image ) # Test not batched input lowerCAmelCase : int = image_processing(image_inputs[0] , return_tensors="""pt""" ).pixel_values lowerCAmelCase , lowerCAmelCase : Tuple = self.image_processor_tester.get_expected_values(lowercase_ ) self.assertEqual( encoded_images.shape , (1, self.image_processor_tester.num_channels, expected_height, expected_width) , ) # Test batched lowerCAmelCase , lowerCAmelCase : str = self.image_processor_tester.get_expected_values(lowercase_ , batched=lowercase_ ) lowerCAmelCase : Optional[int] = image_processing(lowercase_ , return_tensors="""pt""" ).pixel_values self.assertEqual( encoded_images.shape , ( self.image_processor_tester.batch_size, self.image_processor_tester.num_channels, expected_height, expected_width, ) , ) def _snake_case ( self ) -> Optional[int]: # Initialize image_processing lowerCAmelCase : int = self.image_processing_class(**self.image_processor_dict ) # create random numpy tensors lowerCAmelCase : Any = prepare_image_inputs(self.image_processor_tester , equal_resolution=lowercase_ , numpify=lowercase_ ) for image in 
image_inputs: self.assertIsInstance(lowercase_ , np.ndarray ) # Test not batched input lowerCAmelCase : List[Any] = image_processing(image_inputs[0] , return_tensors="""pt""" ).pixel_values lowerCAmelCase , lowerCAmelCase : Dict = self.image_processor_tester.get_expected_values(lowercase_ ) self.assertEqual( encoded_images.shape , (1, self.image_processor_tester.num_channels, expected_height, expected_width) , ) # Test batched lowerCAmelCase : List[Any] = image_processing(lowercase_ , return_tensors="""pt""" ).pixel_values lowerCAmelCase , lowerCAmelCase : int = self.image_processor_tester.get_expected_values(lowercase_ , batched=lowercase_ ) self.assertEqual( encoded_images.shape , ( self.image_processor_tester.batch_size, self.image_processor_tester.num_channels, expected_height, expected_width, ) , ) def _snake_case ( self ) -> List[str]: # Initialize image_processing lowerCAmelCase : List[Any] = self.image_processing_class(**self.image_processor_dict ) # create random PyTorch tensors lowerCAmelCase : Any = prepare_image_inputs(self.image_processor_tester , equal_resolution=lowercase_ , torchify=lowercase_ ) for image in image_inputs: self.assertIsInstance(lowercase_ , torch.Tensor ) # Test not batched input lowerCAmelCase : Optional[int] = image_processing(image_inputs[0] , return_tensors="""pt""" ).pixel_values lowerCAmelCase , lowerCAmelCase : str = self.image_processor_tester.get_expected_values(lowercase_ ) self.assertEqual( encoded_images.shape , (1, self.image_processor_tester.num_channels, expected_height, expected_width) , ) # Test batched lowerCAmelCase : str = image_processing(lowercase_ , return_tensors="""pt""" ).pixel_values lowerCAmelCase , lowerCAmelCase : List[str] = self.image_processor_tester.get_expected_values(lowercase_ , batched=lowercase_ ) self.assertEqual( encoded_images.shape , ( self.image_processor_tester.batch_size, self.image_processor_tester.num_channels, expected_height, expected_width, ) , ) @slow def _snake_case ( self ) -> 
int: # prepare image and target lowerCAmelCase : Tuple = Image.open("""./tests/fixtures/tests_samples/COCO/000000039769.png""" ) with open("""./tests/fixtures/tests_samples/COCO/coco_annotations.txt""" , """r""" ) as f: lowerCAmelCase : str = json.loads(f.read() ) lowerCAmelCase : List[Any] = {"""image_id""": 39769, """annotations""": target} # encode them lowerCAmelCase : Dict = DetrImageProcessor.from_pretrained("""facebook/detr-resnet-50""" ) lowerCAmelCase : List[str] = image_processing(images=lowercase_ , annotations=lowercase_ , return_tensors="""pt""" ) # verify pixel values lowerCAmelCase : Union[str, Any] = torch.Size([1, 3, 800, 1066] ) self.assertEqual(encoding["""pixel_values"""].shape , lowercase_ ) lowerCAmelCase : Union[str, Any] = torch.tensor([0.2_7_9_6, 0.3_1_3_8, 0.3_4_8_1] ) self.assertTrue(torch.allclose(encoding["""pixel_values"""][0, 0, 0, :3] , lowercase_ , atol=1e-4 ) ) # verify area lowerCAmelCase : List[str] = torch.tensor([5_8_8_7.9_6_0_0, 1_1_2_5_0.2_0_6_1, 4_8_9_3_5_3.8_4_3_8, 8_3_7_1_2_2.7_5_0_0, 1_4_7_9_6_7.5_1_5_6, 1_6_5_7_3_2.3_4_3_8] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""area"""] , lowercase_ ) ) # verify boxes lowerCAmelCase : Tuple = torch.Size([6, 4] ) self.assertEqual(encoding["""labels"""][0]["""boxes"""].shape , lowercase_ ) lowerCAmelCase : Dict = torch.tensor([0.5_5_0_3, 0.2_7_6_5, 0.0_6_0_4, 0.2_2_1_5] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""boxes"""][0] , lowercase_ , atol=1e-3 ) ) # verify image_id lowerCAmelCase : List[Any] = torch.tensor([39769] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""image_id"""] , lowercase_ ) ) # verify is_crowd lowerCAmelCase : Union[str, Any] = torch.tensor([0, 0, 0, 0, 0, 0] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""iscrowd"""] , lowercase_ ) ) # verify class_labels lowerCAmelCase : str = torch.tensor([75, 75, 63, 65, 17, 17] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""class_labels"""] , 
lowercase_ ) ) # verify orig_size lowerCAmelCase : int = torch.tensor([480, 640] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""orig_size"""] , lowercase_ ) ) # verify size lowerCAmelCase : str = torch.tensor([800, 1066] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""size"""] , lowercase_ ) ) @slow def _snake_case ( self ) -> int: # prepare image, target and masks_path lowerCAmelCase : List[Any] = Image.open("""./tests/fixtures/tests_samples/COCO/000000039769.png""" ) with open("""./tests/fixtures/tests_samples/COCO/coco_panoptic_annotations.txt""" , """r""" ) as f: lowerCAmelCase : Any = json.loads(f.read() ) lowerCAmelCase : Optional[Any] = {"""file_name""": """000000039769.png""", """image_id""": 39769, """segments_info""": target} lowerCAmelCase : List[str] = pathlib.Path("""./tests/fixtures/tests_samples/COCO/coco_panoptic""" ) # encode them lowerCAmelCase : Any = DetrImageProcessor.from_pretrained("""facebook/detr-resnet-50-panoptic""" ) lowerCAmelCase : Tuple = image_processing(images=lowercase_ , annotations=lowercase_ , masks_path=lowercase_ , return_tensors="""pt""" ) # verify pixel values lowerCAmelCase : Tuple = torch.Size([1, 3, 800, 1066] ) self.assertEqual(encoding["""pixel_values"""].shape , lowercase_ ) lowerCAmelCase : str = torch.tensor([0.2_7_9_6, 0.3_1_3_8, 0.3_4_8_1] ) self.assertTrue(torch.allclose(encoding["""pixel_values"""][0, 0, 0, :3] , lowercase_ , atol=1e-4 ) ) # verify area lowerCAmelCase : Union[str, Any] = torch.tensor([1_4_7_9_7_9.6_8_7_5, 1_6_5_5_2_7.0_4_6_9, 4_8_4_6_3_8.5_9_3_8, 1_1_2_9_2.9_3_7_5, 5_8_7_9.6_5_6_2, 7_6_3_4.1_1_4_7] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""area"""] , lowercase_ ) ) # verify boxes lowerCAmelCase : Optional[int] = torch.Size([6, 4] ) self.assertEqual(encoding["""labels"""][0]["""boxes"""].shape , lowercase_ ) lowerCAmelCase : Union[str, Any] = torch.tensor([0.2_6_2_5, 0.5_4_3_7, 0.4_6_8_8, 0.8_6_2_5] ) 
self.assertTrue(torch.allclose(encoding["""labels"""][0]["""boxes"""][0] , lowercase_ , atol=1e-3 ) ) # verify image_id lowerCAmelCase : Tuple = torch.tensor([39769] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""image_id"""] , lowercase_ ) ) # verify is_crowd lowerCAmelCase : Any = torch.tensor([0, 0, 0, 0, 0, 0] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""iscrowd"""] , lowercase_ ) ) # verify class_labels lowerCAmelCase : Tuple = torch.tensor([17, 17, 63, 75, 75, 93] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""class_labels"""] , lowercase_ ) ) # verify masks lowerCAmelCase : Union[str, Any] = 822873 self.assertEqual(encoding["""labels"""][0]["""masks"""].sum().item() , lowercase_ ) # verify orig_size lowerCAmelCase : str = torch.tensor([480, 640] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""orig_size"""] , lowercase_ ) ) # verify size lowerCAmelCase : List[str] = torch.tensor([800, 1066] ) self.assertTrue(torch.allclose(encoding["""labels"""][0]["""size"""] , lowercase_ ) )
693
0
# NOTE(review): this chunk is a machine-mangled copy of transformers' AutoProcessor
# test suite. Newlines were stripped (the text below is NOT valid Python as one line
# each) and identifiers were rewritten: "WavaVeca*" is presumably "Wav2Vec2*",
# "__lowercase"/"lowercase_" replaced the real (distinct) local/parameter names,
# "_a"/"_snake_case"/"_UpperCamelCase" replaced class/method/attribute names.
# Duplicated "__lowercase" parameters would be a SyntaxError in real Python.
# Left byte-identical; restore from the upstream test file before running.
# Imports, fixture paths, and the first test class (from_pretrained round-trips):
import json import os import sys import tempfile import unittest from pathlib import Path from shutil import copyfile from huggingface_hub import HfFolder, Repository, create_repo, delete_repo from requests.exceptions import HTTPError import transformers from transformers import ( CONFIG_MAPPING, FEATURE_EXTRACTOR_MAPPING, PROCESSOR_MAPPING, TOKENIZER_MAPPING, AutoConfig, AutoFeatureExtractor, AutoProcessor, AutoTokenizer, BertTokenizer, ProcessorMixin, WavaVecaConfig, WavaVecaFeatureExtractor, WavaVecaProcessor, ) from transformers.testing_utils import TOKEN, USER, get_tests_dir, is_staging_test from transformers.tokenization_utils import TOKENIZER_CONFIG_FILE from transformers.utils import FEATURE_EXTRACTOR_NAME, is_tokenizers_available sys.path.append(str(Path(__file__).parent.parent.parent.parent / 'utils')) from test_module.custom_configuration import CustomConfig # noqa E402 from test_module.custom_feature_extraction import CustomFeatureExtractor # noqa E402 from test_module.custom_processing import CustomProcessor # noqa E402 from test_module.custom_tokenization import CustomTokenizer # noqa E402 lowerCAmelCase : Any =get_tests_dir('fixtures/dummy_feature_extractor_config.json') lowerCAmelCase : Any =get_tests_dir('fixtures/vocab.json') lowerCAmelCase : Optional[Any] =get_tests_dir('fixtures') class _a ( unittest.TestCase ): _UpperCamelCase: Tuple = ["""[UNK]""", """[CLS]""", """[SEP]""", """[PAD]""", """[MASK]""", """bla""", """blou"""] def _snake_case ( self ) -> Optional[int]: lowerCAmelCase : int = 0 def _snake_case ( self ) -> str: lowerCAmelCase : Dict = AutoProcessor.from_pretrained("""facebook/wav2vec2-base-960h""" ) self.assertIsInstance(__lowercase , __lowercase ) def _snake_case ( self ) -> Any: with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : int = WavaVecaConfig() lowerCAmelCase : Dict = AutoProcessor.from_pretrained("""facebook/wav2vec2-base-960h""" ) # save in new folder model_config.save_pretrained(__lowercase )
# NOTE(review): save/reload round-trips and `processor_class`-dropping tests;
# the `with open(...)` paths reference TOKENIZER_CONFIG_FILE / FEATURE_EXTRACTOR_NAME
# upstream — here mangled to "__lowercase". Confirm against upstream before use.
processor.save_pretrained(__lowercase ) lowerCAmelCase : Any = AutoProcessor.from_pretrained(__lowercase ) self.assertIsInstance(__lowercase , __lowercase ) def _snake_case ( self ) -> int: with tempfile.TemporaryDirectory() as tmpdirname: # copy relevant files copyfile(__lowercase , os.path.join(__lowercase , __lowercase ) ) copyfile(__lowercase , os.path.join(__lowercase , """vocab.json""" ) ) lowerCAmelCase : Optional[Any] = AutoProcessor.from_pretrained(__lowercase ) self.assertIsInstance(__lowercase , __lowercase ) def _snake_case ( self ) -> Optional[int]: with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : int = WavaVecaFeatureExtractor() lowerCAmelCase : List[str] = AutoTokenizer.from_pretrained("""facebook/wav2vec2-base-960h""" ) lowerCAmelCase : Tuple = WavaVecaProcessor(__lowercase , __lowercase ) # save in new folder processor.save_pretrained(__lowercase ) # drop `processor_class` in tokenizer with open(os.path.join(__lowercase , __lowercase ) , """r""" ) as f: lowerCAmelCase : Tuple = json.load(__lowercase ) config_dict.pop("""processor_class""" ) with open(os.path.join(__lowercase , __lowercase ) , """w""" ) as f: f.write(json.dumps(__lowercase ) ) lowerCAmelCase : List[Any] = AutoProcessor.from_pretrained(__lowercase ) self.assertIsInstance(__lowercase , __lowercase ) def _snake_case ( self ) -> Dict: with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : int = WavaVecaFeatureExtractor() lowerCAmelCase : Optional[Any] = AutoTokenizer.from_pretrained("""facebook/wav2vec2-base-960h""" ) lowerCAmelCase : Any = WavaVecaProcessor(__lowercase , __lowercase ) # save in new folder processor.save_pretrained(__lowercase ) # drop `processor_class` in feature extractor with open(os.path.join(__lowercase , __lowercase ) , """r""" ) as f: lowerCAmelCase : Any = json.load(__lowercase ) config_dict.pop("""processor_class""" ) with open(os.path.join(__lowercase , __lowercase ) , """w""" ) as f: f.write(json.dumps(__lowercase ) )
# NOTE(review): empty-processor fallback test and the trust_remote_code
# (dynamic processor) tests — loading "NewProcessor"/"NewFeatureExtractor"/
# "NewTokenizer(Fast)" from the Hub with remote code enabled/disabled.
lowerCAmelCase : List[Any] = AutoProcessor.from_pretrained(__lowercase ) self.assertIsInstance(__lowercase , __lowercase ) def _snake_case ( self ) -> Union[str, Any]: with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : str = WavaVecaConfig(processor_class="""Wav2Vec2Processor""" ) model_config.save_pretrained(__lowercase ) # copy relevant files copyfile(__lowercase , os.path.join(__lowercase , """vocab.json""" ) ) # create emtpy sample processor with open(os.path.join(__lowercase , __lowercase ) , """w""" ) as f: f.write("""{}""" ) lowerCAmelCase : str = AutoProcessor.from_pretrained(__lowercase ) self.assertIsInstance(__lowercase , __lowercase ) def _snake_case ( self ) -> List[str]: # If remote code is not set, we will time out when asking whether to load the model. with self.assertRaises(__lowercase ): lowerCAmelCase : str = AutoProcessor.from_pretrained("""hf-internal-testing/test_dynamic_processor""" ) # If remote code is disabled, we can't load this config. with self.assertRaises(__lowercase ): lowerCAmelCase : Optional[int] = AutoProcessor.from_pretrained( """hf-internal-testing/test_dynamic_processor""" , trust_remote_code=__lowercase ) lowerCAmelCase : List[str] = AutoProcessor.from_pretrained("""hf-internal-testing/test_dynamic_processor""" , trust_remote_code=__lowercase ) self.assertTrue(processor.special_attribute_present ) self.assertEqual(processor.__class__.__name__ , """NewProcessor""" ) lowerCAmelCase : Any = processor.feature_extractor self.assertTrue(feature_extractor.special_attribute_present ) self.assertEqual(feature_extractor.__class__.__name__ , """NewFeatureExtractor""" ) lowerCAmelCase : Optional[Any] = processor.tokenizer self.assertTrue(tokenizer.special_attribute_present ) if is_tokenizers_available(): self.assertEqual(tokenizer.__class__.__name__ , """NewTokenizerFast""" ) # Test we can also load the slow version lowerCAmelCase : List[Any] = AutoProcessor.from_pretrained(
# NOTE(review): slow-tokenizer branch plus the register()-based custom
# processor round-trip; the finally-blocks below undo the registrations.
"""hf-internal-testing/test_dynamic_processor""" , trust_remote_code=__lowercase , use_fast=__lowercase ) lowerCAmelCase : Dict = new_processor.tokenizer self.assertTrue(new_tokenizer.special_attribute_present ) self.assertEqual(new_tokenizer.__class__.__name__ , """NewTokenizer""" ) else: self.assertEqual(tokenizer.__class__.__name__ , """NewTokenizer""" ) def _snake_case ( self ) -> Union[str, Any]: try: AutoConfig.register("""custom""" , __lowercase ) AutoFeatureExtractor.register(__lowercase , __lowercase ) AutoTokenizer.register(__lowercase , slow_tokenizer_class=__lowercase ) AutoProcessor.register(__lowercase , __lowercase ) # Trying to register something existing in the Transformers library will raise an error with self.assertRaises(__lowercase ): AutoProcessor.register(__lowercase , __lowercase ) # Now that the config is registered, it can be used as any other config with the auto-API lowerCAmelCase : Optional[int] = CustomFeatureExtractor.from_pretrained(__lowercase ) with tempfile.TemporaryDirectory() as tmp_dir: lowerCAmelCase : int = os.path.join(__lowercase , """vocab.txt""" ) with open(__lowercase , """w""" , encoding="""utf-8""" ) as vocab_writer: vocab_writer.write("""""".join([x + """\n""" for x in self.vocab_tokens] ) ) lowerCAmelCase : List[str] = CustomTokenizer(__lowercase ) lowerCAmelCase : List[str] = CustomProcessor(__lowercase , __lowercase ) with tempfile.TemporaryDirectory() as tmp_dir: processor.save_pretrained(__lowercase ) lowerCAmelCase : int = AutoProcessor.from_pretrained(__lowercase ) self.assertIsInstance(__lowercase , __lowercase ) finally: if "custom" in CONFIG_MAPPING._extra_content: del CONFIG_MAPPING._extra_content["custom"] if CustomConfig in FEATURE_EXTRACTOR_MAPPING._extra_content: del FEATURE_EXTRACTOR_MAPPING._extra_content[CustomConfig] if CustomConfig in TOKENIZER_MAPPING._extra_content: del TOKENIZER_MAPPING._extra_content[CustomConfig] if CustomConfig in PROCESSOR_MAPPING._extra_content: del
# NOTE(review): local classes with special_attribute_present=False registered to
# test the local-vs-remote resolution order of trust_remote_code.
PROCESSOR_MAPPING._extra_content[CustomConfig] def _snake_case ( self ) -> str: class _a ( lowercase__ ): _UpperCamelCase: Union[str, Any] = False class _a ( lowercase__ ): _UpperCamelCase: Dict = False class _a ( lowercase__ ): _UpperCamelCase: Union[str, Any] = """AutoFeatureExtractor""" _UpperCamelCase: List[str] = """AutoTokenizer""" _UpperCamelCase: Union[str, Any] = False try: AutoConfig.register("""custom""" , __lowercase ) AutoFeatureExtractor.register(__lowercase , __lowercase ) AutoTokenizer.register(__lowercase , slow_tokenizer_class=__lowercase ) AutoProcessor.register(__lowercase , __lowercase ) # If remote code is not set, the default is to use local classes. lowerCAmelCase : List[Any] = AutoProcessor.from_pretrained("""hf-internal-testing/test_dynamic_processor""" ) self.assertEqual(processor.__class__.__name__ , """NewProcessor""" ) self.assertFalse(processor.special_attribute_present ) self.assertFalse(processor.feature_extractor.special_attribute_present ) self.assertFalse(processor.tokenizer.special_attribute_present ) # If remote code is disabled, we load the local ones. lowerCAmelCase : Tuple = AutoProcessor.from_pretrained( """hf-internal-testing/test_dynamic_processor""" , trust_remote_code=__lowercase ) self.assertEqual(processor.__class__.__name__ , """NewProcessor""" ) self.assertFalse(processor.special_attribute_present ) self.assertFalse(processor.feature_extractor.special_attribute_present ) self.assertFalse(processor.tokenizer.special_attribute_present ) # If remote is enabled, we load from the Hub.
# NOTE(review): remainder of the resolution test, two small "default class"
# tests, then the @is_staging_test class (push_to_hub round-trips using TOKEN/USER).
lowerCAmelCase : Dict = AutoProcessor.from_pretrained( """hf-internal-testing/test_dynamic_processor""" , trust_remote_code=__lowercase ) self.assertEqual(processor.__class__.__name__ , """NewProcessor""" ) self.assertTrue(processor.special_attribute_present ) self.assertTrue(processor.feature_extractor.special_attribute_present ) self.assertTrue(processor.tokenizer.special_attribute_present ) finally: if "custom" in CONFIG_MAPPING._extra_content: del CONFIG_MAPPING._extra_content["custom"] if CustomConfig in FEATURE_EXTRACTOR_MAPPING._extra_content: del FEATURE_EXTRACTOR_MAPPING._extra_content[CustomConfig] if CustomConfig in TOKENIZER_MAPPING._extra_content: del TOKENIZER_MAPPING._extra_content[CustomConfig] if CustomConfig in PROCESSOR_MAPPING._extra_content: del PROCESSOR_MAPPING._extra_content[CustomConfig] def _snake_case ( self ) -> List[Any]: lowerCAmelCase : Any = AutoProcessor.from_pretrained("""hf-internal-testing/tiny-random-bert""" ) self.assertEqual(processor.__class__.__name__ , """BertTokenizerFast""" ) def _snake_case ( self ) -> Union[str, Any]: lowerCAmelCase : str = AutoProcessor.from_pretrained("""hf-internal-testing/tiny-random-convnext""" ) self.assertEqual(processor.__class__.__name__ , """ConvNextImageProcessor""" ) @is_staging_test class _a ( unittest.TestCase ): _UpperCamelCase: List[Any] = ["""[UNK]""", """[CLS]""", """[SEP]""", """[PAD]""", """[MASK]""", """bla""", """blou"""] @classmethod def _snake_case ( cls ) -> int: lowerCAmelCase : Any = TOKEN HfFolder.save_token(__lowercase ) @classmethod def _snake_case ( cls ) -> Optional[int]: try: delete_repo(token=cls._token , repo_id="""test-processor""" ) except HTTPError: pass try: delete_repo(token=cls._token , repo_id="""valid_org/test-processor-org""" ) except HTTPError: pass try: delete_repo(token=cls._token , repo_id="""test-dynamic-processor""" ) except HTTPError: pass def _snake_case ( self ) -> Tuple: lowerCAmelCase : Any = WavaVecaProcessor.from_pretrained(__lowercase ) with
# NOTE(review): push_to_hub under the user namespace, then under an organization.
tempfile.TemporaryDirectory() as tmp_dir: processor.save_pretrained( os.path.join(__lowercase , """test-processor""" ) , push_to_hub=__lowercase , use_auth_token=self._token ) lowerCAmelCase : str = WavaVecaProcessor.from_pretrained(f"""{USER}/test-processor""" ) for k, v in processor.feature_extractor.__dict__.items(): self.assertEqual(__lowercase , getattr(new_processor.feature_extractor , __lowercase ) ) self.assertDictEqual(new_processor.tokenizer.get_vocab() , processor.tokenizer.get_vocab() ) def _snake_case ( self ) -> Dict: lowerCAmelCase : Any = WavaVecaProcessor.from_pretrained(__lowercase ) with tempfile.TemporaryDirectory() as tmp_dir: processor.save_pretrained( os.path.join(__lowercase , """test-processor-org""" ) , push_to_hub=__lowercase , use_auth_token=self._token , organization="""valid_org""" , ) lowerCAmelCase : Optional[int] = WavaVecaProcessor.from_pretrained("""valid_org/test-processor-org""" ) for k, v in processor.feature_extractor.__dict__.items(): self.assertEqual(__lowercase , getattr(new_processor.feature_extractor , __lowercase ) ) self.assertDictEqual(new_processor.tokenizer.get_vocab() , processor.tokenizer.get_vocab() ) def _snake_case ( self ) -> Optional[Any]: CustomFeatureExtractor.register_for_auto_class() CustomTokenizer.register_for_auto_class() CustomProcessor.register_for_auto_class() lowerCAmelCase : Tuple = CustomFeatureExtractor.from_pretrained(__lowercase ) with tempfile.TemporaryDirectory() as tmp_dir: lowerCAmelCase : str = os.path.join(__lowercase , """vocab.txt""" ) with open(__lowercase , """w""" , encoding="""utf-8""" ) as vocab_writer: vocab_writer.write("""""".join([x + """\n""" for x in self.vocab_tokens] ) ) lowerCAmelCase : Any = CustomTokenizer(__lowercase ) lowerCAmelCase : str = CustomProcessor(__lowercase , __lowercase ) with tempfile.TemporaryDirectory() as tmp_dir: create_repo(f"""{USER}/test-dynamic-processor""" , token=self._token ) lowerCAmelCase : int = Repository(__lowercase ,
# NOTE(review): dynamic-processor push test — verifies auto_map entries, that the
# custom *.py modules were copied next to the config, and the final remote reload.
clone_from=f"""{USER}/test-dynamic-processor""" , token=self._token ) processor.save_pretrained(__lowercase ) # This has added the proper auto_map field to the feature extractor config self.assertDictEqual( processor.feature_extractor.auto_map , { """AutoFeatureExtractor""": """custom_feature_extraction.CustomFeatureExtractor""", """AutoProcessor""": """custom_processing.CustomProcessor""", } , ) # This has added the proper auto_map field to the tokenizer config with open(os.path.join(__lowercase , """tokenizer_config.json""" ) ) as f: lowerCAmelCase : int = json.load(__lowercase ) self.assertDictEqual( tokenizer_config["""auto_map"""] , { """AutoTokenizer""": ["""custom_tokenization.CustomTokenizer""", None], """AutoProcessor""": """custom_processing.CustomProcessor""", } , ) # The code has been copied from fixtures self.assertTrue(os.path.isfile(os.path.join(__lowercase , """custom_feature_extraction.py""" ) ) ) self.assertTrue(os.path.isfile(os.path.join(__lowercase , """custom_tokenization.py""" ) ) ) self.assertTrue(os.path.isfile(os.path.join(__lowercase , """custom_processing.py""" ) ) ) repo.push_to_hub() lowerCAmelCase : Dict = AutoProcessor.from_pretrained(f"""{USER}/test-dynamic-processor""" , trust_remote_code=__lowercase ) # Can't make an isinstance check because the new_processor is from the CustomProcessor class of a dynamic module self.assertEqual(new_processor.__class__.__name__ , """CustomProcessor""" )
715
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Tuple = 0 while b > 0: if b & 1: res += a a += a b >>= 1 return res def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Dict = 0 while b > 0: if b & 1: lowerCAmelCase : Optional[int] = ((res % c) + (a % c)) % c a += a b >>= 1 return res
693
0
from ...configuration_utils import PretrainedConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices


# NOTE(review): both module-level assignments were mangled onto the same name, so
# the archive map overwrote the logger; distinct names restored. The ``Optional[int]``
# annotations would also have raised NameError (typing was never imported).
logger = logging.get_logger(__name__)

CONVNEXTV2_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "facebook/convnextv2-tiny-1k-224": "https://huggingface.co/facebook/convnextv2-tiny-1k-224/resolve/main/config.json",
}


class _a(BackboneConfigMixin, PretrainedConfig):
    """Configuration for a ConvNeXt V2 model (``model_type="convnextv2"``).

    Fixes vs. the mangled source: every __init__ parameter was named ``lowercase_``
    (duplicate parameters are a SyntaxError) and ``super().__init__(**A_)`` referenced
    an undefined name; parameter names restored from the attribute assignments below.
    The base classes were both mangled to ``__snake_case`` — restored to the two
    mixins imported above. NOTE(review): class name left as the mangled ``_a``;
    upstream this is presumably ``ConvNextV2Config`` — confirm before renaming.
    """

    # Read by PretrainedConfig machinery; was mangled to `_UpperCamelCase`.
    model_type = "convnextv2"

    def __init__(
        self,
        num_channels=3,
        patch_size=4,
        num_stages=4,
        hidden_sizes=None,
        depths=None,
        hidden_act="gelu",
        initializer_range=0.02,
        layer_norm_eps=1e-12,
        drop_path_rate=0.0,
        image_size=224,
        out_features=None,
        out_indices=None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.num_channels = num_channels
        self.patch_size = patch_size
        self.num_stages = num_stages
        # Defaults match the "tiny" variant when not supplied.
        self.hidden_sizes = [96, 192, 384, 768] if hidden_sizes is None else hidden_sizes
        self.depths = [3, 3, 9, 3] if depths is None else depths
        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.layer_norm_eps = layer_norm_eps
        self.drop_path_rate = drop_path_rate
        self.image_size = image_size
        self.stage_names = ["stem"] + [f"stage{idx}" for idx in range(1, len(self.depths) + 1)]
        # Backbone helper validates/aligns the requested output stages.
        self._out_features, self._out_indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=out_indices, stage_names=self.stage_names
        )
716
from math import factorial


class Dual:
    """A dual number for forward-mode automatic differentiation.

    ``real`` holds the value; ``duals[k]`` holds the coefficient of E**(k+1),
    where E is the nilpotent infinitesimal. Name restored from the mangled ``_a``
    — the class body itself constructs ``Dual(...)``, so this name is required.
    """

    def __init__(self, real, rank):
        self.real = real
        if isinstance(rank, int):
            # An int rank seeds the first `rank` dual coefficients with 1.
            self.duals = [1] * rank
        else:
            self.duals = rank

    def __repr__(self):
        return (
            f"{self.real}+"
            f"{'+'.join(str(dual)+'E'+str(n+1) for n, dual in enumerate(self.duals))}"
        )

    def reduce(self):
        # Drop trailing zero coefficients. NOTE(review): method name was mangled
        # to `_snake_case`; `reduce` is the presumed upstream name — confirm.
        cur = self.duals.copy()
        while cur[-1] == 0:
            cur.pop(-1)
        return Dual(self.real, cur)

    def __add__(self, other):
        if not isinstance(other, Dual):
            # Scalar addition only shifts the real part.
            return Dual(self.real + other, self.duals)
        s_dual = self.duals.copy()
        o_dual = other.duals.copy()
        # Pad the shorter coefficient list (with 1s, as in the original source).
        if len(s_dual) > len(o_dual):
            o_dual.extend([1] * (len(s_dual) - len(o_dual)))
        elif len(s_dual) < len(o_dual):
            s_dual.extend([1] * (len(o_dual) - len(s_dual)))
        new_duals = []
        for i in range(len(s_dual)):
            new_duals.append(s_dual[i] + o_dual[i])
        return Dual(self.real + other.real, new_duals)

    # Restored: mangled source assigned these aliases to `_UpperCamelCase`.
    __radd__ = __add__

    def __sub__(self, other):
        return self + other * -1

    def __mul__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i * other)
            return Dual(self.real * other, new_duals)
        # Polynomial product of the two coefficient lists (offset by one since
        # duals[k] is the E**(k+1) coefficient), plus the cross terms with reals.
        new_duals = [0] * (len(self.duals) + len(other.duals) + 1)
        for i, item in enumerate(self.duals):
            for j, jtem in enumerate(other.duals):
                new_duals[i + j + 1] += item * jtem
        for k in range(len(self.duals)):
            new_duals[k] += self.duals[k] * other.real
        for index in range(len(other.duals)):
            new_duals[index] += other.duals[index] * self.real
        return Dual(self.real * other.real, new_duals)

    __rmul__ = __mul__

    def __truediv__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i / other)
            return Dual(self.real / other, new_duals)
        raise ValueError

    def __floordiv__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i // other)
            return Dual(self.real // other, new_duals)
        raise ValueError

    def __pow__(self, n):
        # Fix: the isinstance check was mangled; restored to reject float exponents.
        if n < 0 or isinstance(n, float):
            raise ValueError("power must be a positive integer")
        if n == 0:
            return 1
        if n == 1:
            return self
        x = self
        for _ in range(n - 1):
            x *= self
        return x


def differentiate(func, position, order):
    """Return the ``order``-th derivative of ``func`` evaluated at ``position``.

    Name restored from the mangled ``_UpperCAmelCase`` — the __main__ block below
    calls ``differentiate(f, 9, 2)``. Raises ValueError on invalid argument types.
    """
    if not callable(func):
        raise ValueError("differentiate() requires a function as input for func")
    if not isinstance(position, (float, int)):
        raise ValueError("differentiate() requires a float as input for position")
    if not isinstance(order, int):
        raise ValueError("differentiate() requires an int as input for order")
    d = Dual(position, 1)
    result = func(d)
    if order == 0:
        return result.real
    # k-th dual coefficient is f^(k)(x)/k!, so scale back by k!.
    return result.duals[order - 1] * factorial(order)


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    def f(y):
        return y**2 * y**4

    print(differentiate(f, 9, 2))
693
0
import hashlib
import unittest

from transformers import MODEL_FOR_DEPTH_ESTIMATION_MAPPING, is_torch_available, is_vision_available
from transformers.pipelines import DepthEstimationPipeline, pipeline
from transformers.testing_utils import (
    is_pipeline_test,
    nested_simplify,
    require_tf,
    require_timm,
    require_torch,
    require_vision,
    slow,
)

from .test_pipelines_common import ANY

if is_torch_available():
    import torch

if is_vision_available():
    from PIL import Image
else:

    class Image:
        """Stand-in so references to ``Image`` below resolve when PIL is absent."""

        @staticmethod
        def open(*args, **kwargs):
            pass


def hashimage(image):
    """Return the hex MD5 digest of the image's raw bytes (compact output fingerprint).

    Fix: the source called ``hashlib.mda``, which does not exist — the intended
    function is ``hashlib.md5``. Name restored from the in-class call site.
    """
    m = hashlib.md5(image.tobytes())
    return m.hexdigest()


@is_pipeline_test
@require_vision
@require_timm
@require_torch
class _a(unittest.TestCase):
    """Depth-estimation pipeline tests.

    NOTE(review): method/attribute names were mangled (`_snake_case`,
    `_UpperCamelCase`); restored so unittest discovers the ``test_*`` methods and
    the pipeline test harness can read ``model_mapping`` — confirm against the
    upstream transformers test file.
    """

    model_mapping = MODEL_FOR_DEPTH_ESTIMATION_MAPPING

    def get_test_pipeline(self, model, tokenizer, processor):
        # Build the pipeline under test plus two sample image paths.
        depth_estimator = DepthEstimationPipeline(model=model, image_processor=processor)
        return depth_estimator, [
            "./tests/fixtures/tests_samples/COCO/000000039769.png",
            "./tests/fixtures/tests_samples/COCO/000000039769.png",
        ]

    def run_pipeline_test(self, depth_estimator, examples):
        outputs = depth_estimator("./tests/fixtures/tests_samples/COCO/000000039769.png")
        self.assertEqual({"predicted_depth": ANY(torch.Tensor), "depth": ANY(Image.Image)}, outputs)
        import datasets

        # Exercise several input formats: PIL image, URL, and dataset files (RGBA/LA/L).
        dataset = datasets.load_dataset("hf-internal-testing/fixtures_image_utils", "image", split="test")
        outputs = depth_estimator(
            [
                Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png"),
                "http://images.cocodataset.org/val2017/000000039769.jpg",
                # RGBA
                dataset[0]["file"],
                # LA
                dataset[1]["file"],
                # L
                dataset[2]["file"],
            ]
        )
        self.assertEqual(
            [
                {"predicted_depth": ANY(torch.Tensor), "depth": ANY(Image.Image)},
                {"predicted_depth": ANY(torch.Tensor), "depth": ANY(Image.Image)},
                {"predicted_depth": ANY(torch.Tensor), "depth": ANY(Image.Image)},
                {"predicted_depth": ANY(torch.Tensor), "depth": ANY(Image.Image)},
                {"predicted_depth": ANY(torch.Tensor), "depth": ANY(Image.Image)},
            ],
            outputs,
        )

    @require_tf
    @unittest.skip("Depth estimation is not implemented in TF")
    def test_small_model_tf(self):
        pass

    @slow
    @require_torch
    def test_large_model_pt(self):
        model_id = "Intel/dpt-large"
        depth_estimator = pipeline("depth-estimation", model=model_id)
        outputs = depth_estimator("http://images.cocodataset.org/val2017/000000039769.jpg")
        outputs["depth"] = hashimage(outputs["depth"])
        # This seems flaky.
        # self.assertEqual(outputs["depth"], "1a39394e282e9f3b0741a90b9f108977")
        self.assertEqual(nested_simplify(outputs["predicted_depth"].max().item()), 29.304)
        self.assertEqual(nested_simplify(outputs["predicted_depth"].min().item()), 2.662)

    @require_torch
    def test_small_model_pt(self):
        # This is highly irregular to have no small tests.
        self.skipTest("There is not hf-internal-testing tiny model for either GLPN nor DPT")
717
from ..utils import DummyObject, requires_backends


class _a(metaclass=DummyObject):
    """Placeholder that raises an informative ImportError when ``keras_nlp`` is missing.

    Fixes vs. the mangled source: ``metaclass=snake_case_`` referenced an undefined
    name — the metaclass is the ``DummyObject`` imported above — and the backend
    list attribute was mangled to ``_UpperCamelCase``; the dummy-object machinery
    reads it as ``_backends``. NOTE(review): the class name itself is mangled
    (``_a``); left as-is since the real dummy class name is not visible here.
    """

    _backends = ["keras_nlp"]

    def __init__(self, *args, **kwargs):
        # Raises ImportError naming the missing backend on any instantiation attempt.
        requires_backends(self, ["keras_nlp"])
693
0
from scipy.stats import pearsonr, spearmanr

# Fix: the source imported `fa_score`, which does not exist in sklearn — the
# intended name is `f1_score`.
from sklearn.metrics import f1_score, matthews_corrcoef

import datasets


# Module-level names restored — the class below references _CITATION,
# _DESCRIPTION and _KWARGS_DESCRIPTION explicitly.
_CITATION = '\\n@inproceedings{wang2019glue,\n title={{GLUE}: A Multi-Task Benchmark and Analysis Platform for Natural Language Understanding},\n author={Wang, Alex and Singh, Amanpreet and Michael, Julian and Hill, Felix and Levy, Omer and Bowman, Samuel R.},\n note={In the Proceedings of ICLR.},\n year={2019}\n}\n'

_DESCRIPTION = '\\nGLUE, the General Language Understanding Evaluation benchmark\n(https://gluebenchmark.com/) is a collection of resources for training,\nevaluating, and analyzing natural language understanding systems.\n'

_KWARGS_DESCRIPTION = '\nCompute GLUE evaluation metric associated to each GLUE dataset.\nArgs:\n predictions: list of predictions to score.\n Each translation should be tokenized into a list of tokens.\n references: list of lists of references for each translation.\n Each reference should be tokenized into a list of tokens.\nReturns: depending on the GLUE subset, one or several of:\n "accuracy": Accuracy\n "f1": F1 score\n "pearson": Pearson Correlation\n "spearmanr": Spearman Correlation\n "matthews_correlation": Matthew Correlation\nExamples:\n\n >>> glue_metric = datasets.load_metric(\'glue\', \'sst2\') # \'sst2\' or any of ["mnli", "mnli_mismatched", "mnli_matched", "qnli", "rte", "wnli", "hans"]\n >>> references = [0, 1]\n >>> predictions = [0, 1]\n >>> results = glue_metric.compute(predictions=predictions, references=references)\n >>> print(results)\n {\'accuracy\': 1.0}\n\n >>> glue_metric = datasets.load_metric(\'glue\', \'mrpc\') # \'mrpc\' or \'qqp\'\n >>> references = [0, 1]\n >>> predictions = [0, 1]\n >>> results = glue_metric.compute(predictions=predictions, references=references)\n >>> print(results)\n {\'accuracy\': 1.0, \'f1\': 1.0}\n\n >>> glue_metric = datasets.load_metric(\'glue\', \'stsb\')\n >>> references = [0., 1., 2., 3., 4., 5.]\n >>> predictions = [0., 1., 2., 3., 4., 5.]\n >>> results = glue_metric.compute(predictions=predictions, references=references)\n >>> print({"pearson": round(results["pearson"], 2), "spearmanr": round(results["spearmanr"], 2)})\n {\'pearson\': 1.0, \'spearmanr\': 1.0}\n\n >>> glue_metric = datasets.load_metric(\'glue\', \'cola\')\n >>> references = [0, 1]\n >>> predictions = [0, 1]\n >>> results = glue_metric.compute(predictions=predictions, references=references)\n >>> print(results)\n {\'matthews_correlation\': 1.0}\n'


def simple_accuracy(preds, labels):
    """Fraction of exact matches between predictions and labels (numpy arrays)."""
    return float((preds == labels).mean())


def acc_and_f1(preds, labels):
    """Accuracy plus binary F1 (used for MRPC/QQP)."""
    acc = simple_accuracy(preds, labels)
    f1 = float(f1_score(y_true=labels, y_pred=preds))
    return {
        "accuracy": acc,
        "f1": f1,
    }


def pearson_and_spearman(preds, labels):
    """Pearson and Spearman correlations (used for STS-B regression)."""
    pearson_corr = float(pearsonr(preds, labels)[0])
    spearman_corr = float(spearmanr(preds, labels)[0])
    return {
        "pearson": pearson_corr,
        "spearmanr": spearman_corr,
    }


@datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
class _a(datasets.Metric):
    """GLUE metric — dispatches per config to accuracy/F1/correlation scores.

    Method names restored to ``_info``/``_compute`` (mangled to ``_snake_case``):
    the ``datasets.Metric`` base class requires exactly those hooks.
    """

    def _info(self):
        if self.config_name not in [
            "sst2",
            "mnli",
            "mnli_mismatched",
            "mnli_matched",
            "cola",
            "stsb",
            "mrpc",
            "qqp",
            "qnli",
            "rte",
            "wnli",
            "hans",
        ]:
            raise KeyError(
                """You should supply a configuration name selected in """
                """[\"sst2\", \"mnli\", \"mnli_mismatched\", \"mnli_matched\", """
                """\"cola\", \"stsb\", \"mrpc\", \"qqp\", \"qnli\", \"rte\", \"wnli\", \"hans\"]"""
            )
        return datasets.MetricInfo(
            description=_DESCRIPTION,
            citation=_CITATION,
            inputs_description=_KWARGS_DESCRIPTION,
            features=datasets.Features(
                {
                    # STS-B is a regression task; all other configs use int labels.
                    "predictions": datasets.Value("int64" if self.config_name != "stsb" else "float32"),
                    "references": datasets.Value("int64" if self.config_name != "stsb" else "float32"),
                }
            ),
            codebase_urls=[],
            reference_urls=[],
            format="numpy",
        )

    def _compute(self, predictions, references):
        # NOTE(review): argument order below was mangled; restored to the upstream
        # convention (matthews_corrcoef takes y_true first) — confirm against
        # the datasets glue metric script.
        if self.config_name == "cola":
            return {"matthews_correlation": matthews_corrcoef(references, predictions)}
        elif self.config_name == "stsb":
            return pearson_and_spearman(predictions, references)
        elif self.config_name in ["mrpc", "qqp"]:
            return acc_and_f1(predictions, references)
        elif self.config_name in ["sst2", "mnli", "mnli_mismatched", "mnli_matched", "qnli", "rte", "wnli", "hans"]:
            return {"accuracy": simple_accuracy(predictions, references)}
        else:
            raise KeyError(
                """You should supply a configuration name selected in """
                """[\"sst2\", \"mnli\", \"mnli_mismatched\", \"mnli_matched\", """
                """\"cola\", \"stsb\", \"mrpc\", \"qqp\", \"qnli\", \"rte\", \"wnli\", \"hans\"]"""
            )
718
from ...utils import (
    OptionalDependencyNotAvailable,
    is_torch_available,
    is_transformers_available,
    is_transformers_version,
)

# Conditional export surface for the unCLIP pipelines: the real implementations
# need torch and transformers >= 4.25.0; otherwise dummy objects are exported
# that raise a helpful error on use.
try:
    if not (is_transformers_available() and is_torch_available() and is_transformers_version('>=', '4.25.0')):
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    from ...utils.dummy_torch_and_transformers_objects import UnCLIPImageVariationPipeline, UnCLIPPipeline
else:
    from .pipeline_unclip import UnCLIPPipeline
    from .pipeline_unclip_image_variation import UnCLIPImageVariationPipeline
    from .text_proj import UnCLIPTextProjModel
693
0
def check_bouncy(n):
    """Return True if ``n`` is bouncy: its digits are neither monotonically
    non-decreasing nor monotonically non-increasing.

    Name restored from the mangled ``_UpperCAmelCase`` — ``solution`` below calls
    ``check_bouncy`` by this name. Raises ValueError for non-integer input.
    """
    if not isinstance(n, int):
        raise ValueError("check_bouncy() accepts only integer arguments")
    str_n = str(n)
    sorted_str_n = "".join(sorted(str_n))
    # Bouncy iff it differs from both its ascending and descending digit sorts.
    return sorted_str_n != str_n and sorted_str_n[::-1] != str_n


def solution(percent=99):
    """Return the least number at which the proportion of bouncy numbers
    (Project Euler 112) first reaches ``percent`` percent.

    Name restored — the __main__ block below calls ``solution(99)``.
    """
    if not 0 < percent < 100:
        raise ValueError("solution() only accepts values from 0 to 100")
    bouncy_num = 0
    num = 1
    while True:
        if check_bouncy(num):
            bouncy_num += 1
        if (bouncy_num / num) * 100 >= percent:
            return num
        num += 1


if __name__ == "__main__":
    from doctest import testmod

    testmod()
    print(f"""{solution(99)}""")
719
def lucas_lehmer_test(p):
    """Lucas–Lehmer primality test for the Mersenne number ``2**p - 1``.

    Returns True iff ``2**p - 1`` is prime (assuming ``p`` itself is prime; the
    classic recurrence s -> s*s - 2 mod m is iterated p-2 times from s=4).
    Name restored from the mangled ``_UpperCAmelCase`` — the __main__ block below
    calls ``lucas_lehmer_test``; the mangled body also never assigned ``s``/``m``.

    Raises ValueError when ``p < 2``.
    """
    if p < 2:
        raise ValueError("p should not be less than 2!")
    elif p == 2:
        return True  # 2**2 - 1 == 3 is prime; the recurrence needs p > 2
    s = 4
    m = (1 << p) - 1
    for _ in range(p - 2):
        s = ((s * s) - 2) % m
    return s == 0


if __name__ == "__main__":
    print(lucas_lehmer_test(7))
    print(lucas_lehmer_test(11))
693
0
import argparse
import json
import os

import evaluate
import torch
from datasets import load_dataset
from torch.optim import AdamW
from torch.utils.data import DataLoader
from transformers import AutoModelForSequenceClassification, AutoTokenizer, get_linear_schedule_with_warmup, set_seed

from accelerate import Accelerator, DistributedType
from accelerate.utils.deepspeed import DummyOptim, DummyScheduler

MAX_GPU_BATCH_SIZE = 16
EVAL_BATCH_SIZE = 32


def get_dataloaders(accelerator, batch_size=16, model_name_or_path="bert-base-cased"):
    """
    Build GLUE/MRPC train and validation dataloaders tokenized with the given model's tokenizer.

    Args:
        accelerator: the `Accelerator` (used to pick TPU-friendly padding).
        batch_size: per-device training batch size.
        model_name_or_path: model id or path used to load the tokenizer.

    Returns:
        (train_dataloader, eval_dataloader)
    """
    tokenizer = AutoTokenizer.from_pretrained(model_name_or_path)
    datasets = load_dataset("glue", "mrpc")

    def tokenize_function(examples):
        # max_length=None => use the model max length (it's actually the default)
        outputs = tokenizer(examples["sentence1"], examples["sentence2"], truncation=True, max_length=None)
        return outputs

    # Apply the method we just defined to all the examples in all the splits of the dataset
    tokenized_datasets = datasets.map(
        tokenize_function,
        batched=True,
        remove_columns=["idx", "sentence1", "sentence2"],
        load_from_cache_file=False,
    )

    # We also rename the 'label' column to 'labels' which is the expected name for labels by the models of the
    # transformers library
    tokenized_datasets = tokenized_datasets.rename_column("label", "labels")

    def collate_fn(examples):
        # On TPU it's best to pad everything to the same length or training will be very slow.
        if accelerator.distributed_type == DistributedType.TPU:
            return tokenizer.pad(examples, padding="max_length", max_length=128, return_tensors="pt")
        return tokenizer.pad(examples, padding="longest", return_tensors="pt")

    # Instantiate dataloaders.
    train_dataloader = DataLoader(
        tokenized_datasets["train"], shuffle=True, collate_fn=collate_fn, batch_size=batch_size
    )
    eval_dataloader = DataLoader(
        tokenized_datasets["validation"], shuffle=False, collate_fn=collate_fn, batch_size=EVAL_BATCH_SIZE
    )

    return train_dataloader, eval_dataloader


def evaluation_loop(accelerator, model, eval_dataloader, metric):
    """Run one evaluation pass and return the accuracy from the GLUE metric."""
    model.eval()
    samples_seen = 0
    for step, batch in enumerate(eval_dataloader):
        # We could avoid this line since we set the accelerator with `device_placement=True`.
        batch.to(accelerator.device)
        with torch.no_grad():
            outputs = model(**batch)
        predictions = outputs.logits.argmax(dim=-1)
        # It is slightly faster to call this once, than multiple times
        predictions, references = accelerator.gather((predictions, batch["labels"]))
        # If we are in a multiprocess environment, the last batch has duplicates
        if accelerator.use_distributed:
            if step == len(eval_dataloader) - 1:
                predictions = predictions[: len(eval_dataloader.dataset) - samples_seen]
                references = references[: len(eval_dataloader.dataset) - samples_seen]
            else:
                samples_seen += references.shape[0]
        metric.add_batch(predictions=predictions, references=references)

    eval_metric = metric.compute()
    return eval_metric["accuracy"]


def training_function(config, args):
    """
    Train (or resume) BERT on MRPC, checkpointing after every epoch and
    dumping a small JSON state file used to verify resume correctness.
    """
    accelerator = Accelerator()

    # Sample hyper-parameters for learning rate, batch size, seed and a few other HPs
    lr = config["lr"]
    num_epochs = int(config["num_epochs"])
    seed = int(config["seed"])
    batch_size = int(config["batch_size"])
    model_name_or_path = args.model_name_or_path

    set_seed(seed)
    train_dataloader, eval_dataloader = get_dataloaders(accelerator, batch_size, model_name_or_path)

    # Instantiate the model (we build the model here so that the seed also control new weights initialization)
    model = AutoModelForSequenceClassification.from_pretrained(model_name_or_path, return_dict=True)

    # Instantiate optimizer: DeepSpeed may own the optimizer, in which case a
    # DummyOptim placeholder is handed to `accelerator.prepare`.
    optimizer_cls = (
        AdamW
        if accelerator.state.deepspeed_plugin is None
        or "optimizer" not in accelerator.state.deepspeed_plugin.deepspeed_config
        else DummyOptim
    )
    optimizer = optimizer_cls(params=model.parameters(), lr=lr)

    if accelerator.state.deepspeed_plugin is not None:
        gradient_accumulation_steps = accelerator.state.deepspeed_plugin.deepspeed_config[
            "gradient_accumulation_steps"
        ]
    else:
        gradient_accumulation_steps = 1
    max_training_steps = (len(train_dataloader) * num_epochs) // gradient_accumulation_steps

    # Instantiate scheduler (DummyScheduler when DeepSpeed owns scheduling).
    if (
        accelerator.state.deepspeed_plugin is None
        or "scheduler" not in accelerator.state.deepspeed_plugin.deepspeed_config
    ):
        lr_scheduler = get_linear_schedule_with_warmup(
            optimizer=optimizer,
            num_warmup_steps=0,
            num_training_steps=max_training_steps,
        )
    else:
        lr_scheduler = DummyScheduler(optimizer, total_num_steps=max_training_steps, warmup_num_steps=0)

    # Prepare everything
    # There is no specific order to remember, we just need to unpack the objects in the same order we gave them to the
    # prepare method.
    model, optimizer, train_dataloader, eval_dataloader, lr_scheduler = accelerator.prepare(
        model, optimizer, train_dataloader, eval_dataloader, lr_scheduler
    )

    # We need to keep track of how many total steps we have iterated over
    overall_step = 0
    # We also need to keep track of the stating epoch so files are named properly
    starting_epoch = 0
    metric = evaluate.load("glue", "mrpc")
    ending_epoch = num_epochs
    if args.partial_train_epoch is not None:
        ending_epoch = args.partial_train_epoch

    if args.resume_from_checkpoint:
        accelerator.load_state(args.resume_from_checkpoint)
        # Checkpoint folders are named "epoch_<n>"; recover <n> from the path.
        epoch_string = args.resume_from_checkpoint.split("epoch_")[1]
        state_epoch_num = ""
        for char in epoch_string:
            if char.isdigit():
                state_epoch_num += char
            else:
                break
        starting_epoch = int(state_epoch_num) + 1
        accuracy = evaluation_loop(accelerator, model, eval_dataloader, metric)
        accelerator.print("resumed checkpoint performance:", accuracy)
        accelerator.print("resumed checkpoint's scheduler's lr:", lr_scheduler.get_lr()[0])
        accelerator.print("resumed optimizers's lr:", optimizer.param_groups[0]["lr"])
        with open(os.path.join(args.output_dir, f"state_{starting_epoch-1}.json"), "r") as f:
            resumed_state = json.load(f)
            assert resumed_state["accuracy"] == accuracy, "Accuracy mismatch, loading from checkpoint failed"
            assert (
                resumed_state["lr"] == lr_scheduler.get_lr()[0]
            ), "Scheduler learning rate mismatch, loading from checkpoint failed"
            assert (
                resumed_state["optimizer_lr"] == optimizer.param_groups[0]["lr"]
            ), "Optimizer learning rate mismatch, loading from checkpoint failed"
            assert resumed_state["epoch"] == starting_epoch - 1, "Epoch mismatch, loading from checkpoint failed"
        return

    # Now we train the model
    state = {}
    for epoch in range(starting_epoch, ending_epoch):
        model.train()
        for step, batch in enumerate(train_dataloader):
            outputs = model(**batch)
            loss = outputs.loss
            loss = loss / gradient_accumulation_steps
            accelerator.backward(loss)
            if step % gradient_accumulation_steps == 0:
                optimizer.step()
                lr_scheduler.step()
                optimizer.zero_grad()
            overall_step += 1

        output_dir = f"epoch_{epoch}"
        output_dir = os.path.join(args.output_dir, output_dir)
        accelerator.save_state(output_dir)
        accuracy = evaluation_loop(accelerator, model, eval_dataloader, metric)
        state["accuracy"] = accuracy
        state["lr"] = lr_scheduler.get_lr()[0]
        state["optimizer_lr"] = optimizer.param_groups[0]["lr"]
        state["epoch"] = epoch
        state["overall_step"] = overall_step
        accelerator.print(f"epoch {epoch}:", state)

        accelerator.wait_for_everyone()
        if accelerator.is_main_process:
            with open(os.path.join(args.output_dir, f"state_{epoch}.json"), "w") as f:
                json.dump(state, f)


def main():
    """Parse CLI arguments and launch training."""
    parser = argparse.ArgumentParser(description="Simple example of training script tracking peak GPU memory usage.")
    parser.add_argument(
        "--model_name_or_path",
        type=str,
        default="bert-base-cased",
        help="Path to pretrained model or model identifier from huggingface.co/models.",
        required=False,
    )
    parser.add_argument(
        "--output_dir",
        type=str,
        default=".",
        help="Optional save directory where all checkpoint folders will be stored. Default is the current working directory.",
    )
    parser.add_argument(
        "--resume_from_checkpoint",
        type=str,
        default=None,
        help="If the training should continue from a checkpoint folder.",
    )
    parser.add_argument(
        "--partial_train_epoch",
        type=int,
        default=None,
        help="If passed, the training will stop after this number of epochs.",
    )
    parser.add_argument(
        "--num_epochs",
        type=int,
        default=2,
        help="Number of train epochs.",
    )
    args = parser.parse_args()
    config = {"lr": 2e-5, "num_epochs": args.num_epochs, "seed": 42, "batch_size": 16}
    training_function(config, args)


if __name__ == "__main__":
    main()
720
import random
import unittest

import torch

from diffusers import IFImg2ImgSuperResolutionPipeline
from diffusers.utils import floats_tensor
from diffusers.utils.import_utils import is_xformers_available
from diffusers.utils.testing_utils import skip_mps, torch_device

from ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS
from ..test_pipelines_common import PipelineTesterMixin
from . import IFPipelineTesterMixin


@skip_mps
class IFImg2ImgSuperResolutionPipelineFastTests(PipelineTesterMixin, IFPipelineTesterMixin, unittest.TestCase):
    """Fast (tiny-model) checks for the DeepFloyd IF img2img super-resolution pipeline."""

    pipeline_class = IFImg2ImgSuperResolutionPipeline
    # Width/height are fixed by the super-resolution stage, so they are not pipeline params.
    params = TEXT_GUIDED_IMAGE_VARIATION_PARAMS - {"width", "height"}
    batch_params = TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS.union({"original_image"})
    required_optional_params = PipelineTesterMixin.required_optional_params - {"latents"}

    def get_dummy_components(self):
        # Shared tiny super-resolution components from IFPipelineTesterMixin.
        return self._get_superresolution_dummy_components()

    def get_dummy_inputs(self, device, seed=0):
        """Deterministic dummy inputs; MPS needs a global (CPU) generator."""
        if str(device).startswith("mps"):
            generator = torch.manual_seed(seed)
        else:
            generator = torch.Generator(device=device).manual_seed(seed)

        image = floats_tensor((1, 3, 32, 32), rng=random.Random(seed)).to(device)
        original_image = floats_tensor((1, 3, 16, 16), rng=random.Random(seed)).to(device)

        return {
            "prompt": "A painting of a squirrel eating a burger",
            "image": image,
            "original_image": original_image,
            "generator": generator,
            "num_inference_steps": 2,
            "output_type": "numpy",
        }

    @unittest.skipIf(
        torch_device != "cuda" or not is_xformers_available(),
        reason="XFormers attention is only available with CUDA and `xformers` installed",
    )
    def test_xformers_attention_forwardGenerator_pass(self):
        self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3)

    def test_save_load_optional_components(self):
        self._test_save_load_optional_components()

    @unittest.skipIf(torch_device != "cuda", reason="float16 requires CUDA")
    def test_save_load_float16(self):
        # Due to non-determinism in save load of the hf-internal-testing/tiny-random-t5 text encoder
        super().test_save_load_float16(expected_max_diff=1e-1)

    def test_attention_slicing_forward_pass(self):
        self._test_attention_slicing_forward_pass(expected_max_diff=1e-2)

    def test_save_load_local(self):
        self._test_save_load_local()

    def test_inference_batch_single_identical(self):
        self._test_inference_batch_single_identical(expected_max_diff=1e-2)
693
0
import os


def solution(path=None):
    """
    Find the greatest product of four adjacent numbers in the same direction
    (right, down, or either diagonal) in a 20x20 grid (Project Euler 11).

    Args:
        path: optional path to the grid file; defaults to ``grid.txt`` next
            to this script (backward compatible with the original behavior).

    Returns:
        The maximum four-factor product found in the grid.
    """
    if path is None:
        path = os.path.join(os.path.dirname(__file__), "grid.txt")
    with open(path) as f:
        grid = [[int(x) for x in f.readline().split()] for _ in range(20)]

    maximum = 0

    # Horizontal runs of four (rightwards).
    for i in range(20):
        for j in range(17):
            temp = grid[i][j] * grid[i][j + 1] * grid[i][j + 2] * grid[i][j + 3]
            if temp > maximum:
                maximum = temp

    # Vertical runs of four (downwards).
    for i in range(17):
        for j in range(20):
            temp = grid[i][j] * grid[i + 1][j] * grid[i + 2][j] * grid[i + 3][j]
            if temp > maximum:
                maximum = temp

    # Down-right diagonal.
    for i in range(17):
        for j in range(17):
            temp = grid[i][j] * grid[i + 1][j + 1] * grid[i + 2][j + 2] * grid[i + 3][j + 3]
            if temp > maximum:
                maximum = temp

    # Down-left diagonal (j must be at least 3).
    for i in range(17):
        for j in range(3, 20):
            temp = grid[i][j] * grid[i + 1][j - 1] * grid[i + 2][j - 2] * grid[i + 3][j - 3]
            if temp > maximum:
                maximum = temp

    return maximum


if __name__ == "__main__":
    print(solution())
721
from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

# No public LLaMA checkpoints publish a remote config archive.
LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP = {}


class LlamaConfig(PretrainedConfig):
    """
    Configuration class for LLaMA models.

    Stores the hyper-parameters of a LLaMA architecture; the defaults match
    the original 7B model.
    """

    model_type = "llama"
    # Past key/values are runtime caches, not part of the configuration.
    keys_to_ignore_at_inference = ["past_key_values"]

    def __init__(
        self,
        vocab_size=32000,
        hidden_size=4096,
        intermediate_size=11008,
        num_hidden_layers=32,
        num_attention_heads=32,
        num_key_value_heads=None,
        hidden_act="silu",
        max_position_embeddings=2048,
        initializer_range=0.02,
        rms_norm_eps=1e-6,
        use_cache=True,
        pad_token_id=0,
        bos_token_id=1,
        eos_token_id=2,
        pretraining_tp=1,
        tie_word_embeddings=False,
        rope_scaling=None,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads

        # for backward compatibility: configs predating grouped-query
        # attention default to one key/value head per attention head
        if num_key_value_heads is None:
            num_key_value_heads = num_attention_heads
        self.num_key_value_heads = num_key_value_heads

        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.rms_norm_eps = rms_norm_eps
        self.pretraining_tp = pretraining_tp
        self.use_cache = use_cache
        self.rope_scaling = rope_scaling
        self._rope_scaling_validation()

        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )

    def _rope_scaling_validation(self):
        """Validate `rope_scaling`: None, or {"type": "linear"|"dynamic", "factor": float > 1}."""
        if self.rope_scaling is None:
            return

        if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
            raise ValueError(
                "`rope_scaling` must be a dictionary with two fields, `name` and `factor`, "
                f"got {self.rope_scaling}"
            )
        rope_scaling_type = self.rope_scaling.get("type", None)
        rope_scaling_factor = self.rope_scaling.get("factor", None)
        if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic"]:
            raise ValueError(
                f"`rope_scaling`'s name field must be one of ['linear', 'dynamic'], got {rope_scaling_type}"
            )
        if rope_scaling_factor is None or not isinstance(rope_scaling_factor, float) or rope_scaling_factor <= 1.0:
            raise ValueError(f"`rope_scaling`'s factor field must be an float > 1, got {rope_scaling_factor}")
693
0
"""Convert LeViT checkpoints from the timm library to the Hugging Face Transformers format."""

import argparse
import json
from collections import OrderedDict
from functools import partial
from pathlib import Path

import timm
import torch
from huggingface_hub import hf_hub_download

from transformers import LevitConfig, LevitForImageClassificationWithTeacher, LevitImageProcessor
from transformers.utils import logging


logging.set_verbosity_info()
logger = logging.get_logger()


def convert_weight_and_push(hidden_sizes, name, config, save_directory, push_to_hub=True):
    """
    Convert one timm LeViT model into a `LevitForImageClassificationWithTeacher`.

    Loads the matching pretrained timm model, copies its weights over by
    position, checks that both models produce identical logits on random
    input, and optionally saves model + image processor under
    ``save_directory / name``.
    """
    print(f"Converting {name}...")

    # Pick the timm source model from the first-stage hidden size; the 128
    # family has two variants distinguished by a trailing "S" in the name.
    with torch.no_grad():
        if hidden_sizes == 128:
            if name[-1] == "S":
                from_model = timm.create_model("levit_128s", pretrained=True)
            else:
                from_model = timm.create_model("levit_128", pretrained=True)
        if hidden_sizes == 192:
            from_model = timm.create_model("levit_192", pretrained=True)
        if hidden_sizes == 256:
            from_model = timm.create_model("levit_256", pretrained=True)
        if hidden_sizes == 384:
            from_model = timm.create_model("levit_384", pretrained=True)

    from_model.eval()
    our_model = LevitForImageClassificationWithTeacher(config).eval()
    huggingface_weights = OrderedDict()
    weights = from_model.state_dict()
    og_keys = list(from_model.state_dict().keys())
    new_keys = list(our_model.state_dict().keys())
    print(len(og_keys), len(new_keys))
    # Weight tensors correspond 1:1 by position; only key names differ.
    for i in range(len(og_keys)):
        huggingface_weights[new_keys[i]] = weights[og_keys[i]]
    our_model.load_state_dict(huggingface_weights)

    # Sanity check: both models must produce numerically identical logits.
    x = torch.randn((2, 3, 224, 224))
    from_model_logits = from_model(x)
    our_model_logits = our_model(x).logits
    assert torch.allclose(from_model_logits, our_model_logits), "The model logits don't match the original one."

    checkpoint_name = name
    print(checkpoint_name)

    if push_to_hub:
        our_model.save_pretrained(save_directory / checkpoint_name)
        image_processor = LevitImageProcessor()
        image_processor.save_pretrained(save_directory / checkpoint_name)
        print(f"Pushed {checkpoint_name}")


def convert_weights_and_push(save_directory, model_name=None, push_to_hub=True):
    """
    Convert one named LeViT variant, or every known variant when
    ``model_name`` is None.

    Returns:
        (config, expected_shape) of the last converted model.
    """
    filename = "imagenet-1k-id2label.json"
    num_labels = 1000
    expected_shape = (1, num_labels)
    repo_id = "huggingface/label-files"

    id2label = json.load(open(hf_hub_download(repo_id, filename, repo_type="dataset"), "r"))
    id2label = {int(k): v for k, v in id2label.items()}
    label2id = {v: k for k, v in id2label.items()}

    # Config factory with the ImageNet label maps pre-filled.
    ImageNetPreTrainedConfig = partial(LevitConfig, num_labels=num_labels, id2label=id2label, label2id=label2id)

    names_to_hidden_sizes = {
        "levit-128S": 128,
        "levit-128": 128,
        "levit-192": 192,
        "levit-256": 256,
        "levit-384": 384,
    }

    names_to_config = {
        "levit-128S": ImageNetPreTrainedConfig(
            hidden_sizes=[128, 256, 384],
            num_attention_heads=[4, 6, 8],
            depths=[2, 3, 4],
            key_dim=[16, 16, 16],
            drop_path_rate=0,
        ),
        "levit-128": ImageNetPreTrainedConfig(
            hidden_sizes=[128, 256, 384],
            num_attention_heads=[4, 8, 12],
            depths=[4, 4, 4],
            key_dim=[16, 16, 16],
            drop_path_rate=0,
        ),
        "levit-192": ImageNetPreTrainedConfig(
            hidden_sizes=[192, 288, 384],
            num_attention_heads=[3, 5, 6],
            depths=[4, 4, 4],
            key_dim=[32, 32, 32],
            drop_path_rate=0,
        ),
        "levit-256": ImageNetPreTrainedConfig(
            hidden_sizes=[256, 384, 512],
            num_attention_heads=[4, 6, 8],
            depths=[4, 4, 4],
            key_dim=[32, 32, 32],
            drop_path_rate=0,
        ),
        "levit-384": ImageNetPreTrainedConfig(
            hidden_sizes=[384, 512, 768],
            num_attention_heads=[6, 9, 12],
            depths=[4, 4, 4],
            key_dim=[32, 32, 32],
            drop_path_rate=0.1,
        ),
    }

    if model_name:
        # Bind `config` here too so the final return works in both branches.
        config = names_to_config[model_name]
        convert_weight_and_push(
            names_to_hidden_sizes[model_name], model_name, config, save_directory, push_to_hub
        )
    else:
        for model_name, config in names_to_config.items():
            convert_weight_and_push(names_to_hidden_sizes[model_name], model_name, config, save_directory, push_to_hub)

    return config, expected_shape


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument(
        "--model_name",
        default=None,
        type=str,
        help="The name of the model you wish to convert, it must be one of the supported Levit* architecture,",
    )
    parser.add_argument(
        "--pytorch_dump_folder_path",
        default="levit-dump-folder/",
        type=Path,
        required=False,
        help="Path to the output PyTorch model directory.",
    )
    parser.add_argument("--push_to_hub", action="store_true", help="Push model and image processor to the hub")
    parser.add_argument(
        "--no-push_to_hub",
        dest="push_to_hub",
        action="store_false",
        help="Do not push model and image processor to the hub",
    )

    args = parser.parse_args()
    pytorch_dump_folder_path = args.pytorch_dump_folder_path
    pytorch_dump_folder_path.mkdir(exist_ok=True, parents=True)
    convert_weights_and_push(pytorch_dump_folder_path, args.model_name, args.push_to_hub)
700
from collections import OrderedDict
from typing import Mapping

from packaging import version

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices


logger = logging.get_logger(__name__)

SWIN_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "microsoft/swin-tiny-patch4-window7-224": (
        "https://huggingface.co/microsoft/swin-tiny-patch4-window7-224/resolve/main/config.json"
    ),
    # See all Swin models at https://huggingface.co/models?filter=swin
}


class SwinConfig(BackboneConfigMixin, PretrainedConfig):
    """Configuration for Swin Transformer models (hierarchical vision backbone)."""

    model_type = "swin"

    # Map the generic Transformer attribute names onto Swin's own names.
    attribute_map = {
        "num_attention_heads": "num_heads",
        "num_hidden_layers": "num_layers",
    }

    def __init__(
        self,
        image_size=224,
        patch_size=4,
        num_channels=3,
        embed_dim=96,
        depths=[2, 2, 6, 2],
        num_heads=[3, 6, 12, 24],
        window_size=7,
        mlp_ratio=4.0,
        qkv_bias=True,
        hidden_dropout_prob=0.0,
        attention_probs_dropout_prob=0.0,
        drop_path_rate=0.1,
        hidden_act="gelu",
        use_absolute_embeddings=False,
        initializer_range=0.02,
        layer_norm_eps=1e-5,
        encoder_stride=32,
        out_features=None,
        out_indices=None,
        **kwargs,
    ):
        super().__init__(**kwargs)

        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.embed_dim = embed_dim
        self.depths = depths
        self.num_layers = len(depths)
        self.num_heads = num_heads
        self.window_size = window_size
        self.mlp_ratio = mlp_ratio
        self.qkv_bias = qkv_bias
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.drop_path_rate = drop_path_rate
        self.hidden_act = hidden_act
        self.use_absolute_embeddings = use_absolute_embeddings
        self.layer_norm_eps = layer_norm_eps
        self.initializer_range = initializer_range
        self.encoder_stride = encoder_stride
        # we set the hidden_size attribute in order to make Swin work with
        # VisionEncoderDecoderModel; this indicates the channel dimension
        # after the last stage of the model
        self.hidden_size = int(embed_dim * 2 ** (len(depths) - 1))
        self.stage_names = ["stem"] + [f"stage{idx}" for idx in range(1, len(depths) + 1)]
        self._out_features, self._out_indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=out_indices, stage_names=self.stage_names
        )


class SwinOnnxConfig(OnnxConfig):
    """ONNX export configuration for Swin."""

    torch_onnx_minimum_version = version.parse("1.11")

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        return OrderedDict(
            [
                ("pixel_values", {0: "batch", 1: "num_channels", 2: "height", 3: "width"}),
            ]
        )

    @property
    def atol_for_validation(self) -> float:
        return 1e-4
693
0
import unittest

from transformers import GPTSw3Tokenizer
from transformers.testing_utils import get_tests_dir, require_sentencepiece, require_tokenizers, slow

from ...test_tokenization_common import TokenizerTesterMixin


SAMPLE_VOCAB = get_tests_dir("fixtures/test_sentencepiece_with_bytefallback.model")


@require_sentencepiece
@require_tokenizers
class GPTSw3TokenizationTest(TokenizerTesterMixin, unittest.TestCase):
    """Tokenizer tests for GPT-SW3 (SentencePiece model with byte fallback)."""

    tokenizer_class = GPTSw3Tokenizer
    test_rust_tokenizer = False
    test_sentencepiece = True
    test_sentencepiece_ignore_case = False

    def setUp(self):
        super().setUp()

        # We have a SentencePiece fixture for testing
        tokenizer = GPTSw3Tokenizer(SAMPLE_VOCAB, eos_token="<unk>", bos_token="<unk>", pad_token="<unk>")
        tokenizer.save_pretrained(self.tmpdirname)

    def get_input_output_texts(self, tokenizer):
        input_text = "This is a test"
        output_text = "This is a test"
        return input_text, output_text

    def test_convert_token_and_id(self):
        token = "<s>"
        token_id = 1
        self.assertEqual(self.get_tokenizer()._convert_token_to_id(token), token_id)
        self.assertEqual(self.get_tokenizer()._convert_id_to_token(token_id), token)

    def test_get_vocab(self):
        vocab_keys = list(self.get_tokenizer().get_vocab().keys())
        self.assertEqual(vocab_keys[0], "<unk>")
        self.assertEqual(vocab_keys[1], "<s>")
        self.assertEqual(vocab_keys[-1], "j")
        self.assertEqual(len(vocab_keys), 2000)

    def test_vocab_size(self):
        self.assertEqual(self.get_tokenizer().vocab_size, 2000)

    def test_full_tokenizer(self):
        tokenizer = GPTSw3Tokenizer(SAMPLE_VOCAB)

        tokens = tokenizer.tokenize("This is a test")
        self.assertListEqual(tokens, ["▁This", "▁is", "▁a", "▁t", "est"])
        self.assertListEqual(tokenizer.convert_tokens_to_ids(tokens), [465, 287, 265, 631, 842])

        tokens = tokenizer.tokenize("I was born in 92000, and this is falsé.")
        # fmt: off
        self.assertListEqual(
            tokens,
            ["▁I", "▁was", "▁bor", "n", "▁in", "▁", "<0x39>", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "<0xC3>", "<0xA9>", "."],
        )
        # fmt: on
        ids = tokenizer.convert_tokens_to_ids(tokens)
        self.assertListEqual(
            ids,
            [262, 272, 1525, 286, 271, 268, 60, 916, 633, 633, 633, 259, 266, 301, 287, 384, 367, 263, 198, 172, 260],
        )

        back_tokens = tokenizer.convert_ids_to_tokens(ids)
        # fmt: off
        self.assertListEqual(
            back_tokens,
            ["▁I", "▁was", "▁bor", "n", "▁in", "▁", "<0x39>", "2", "0", "0", "0", ",", "▁and", "▁this", "▁is", "▁f", "al", "s", "<0xC3>", "<0xA9>", "."],
        )
        # fmt: on

    def test_fast_encode_decode(self):
        tokenizer = GPTSw3Tokenizer(SAMPLE_VOCAB)
        texts = ["This is a test", "I was born in 92000, and this is falsé."]
        expected_ids_list = [
            [465, 287, 265, 631, 842],
            [262, 272, 1525, 286, 271, 268, 60, 916, 633, 633, 633, 259, 266, 301, 287, 384, 367, 263, 198, 172, 260],
        ]

        # Test that encode_fast returns the same as tokenize + convert_tokens_to_ids
        for text, expected_ids in zip(texts, expected_ids_list):
            self.assertListEqual(tokenizer.encode_fast(text), expected_ids)

        # Test that decode_fast returns the input text
        for text, token_ids in zip(texts, expected_ids_list):
            self.assertEqual(tokenizer.decode_fast(token_ids), text)

    @slow
    def test_tokenizer_integration(self):
        sequences = [
            "<|python|>def fibonacci(n)\n if n < 0:\n print('Incorrect input')",
            "Hey there, how are you doing this fine day?",
            "This is a text with a trailing spaces followed by a dot .",
            "Häj sväjs lillebrör! =)",
            "Det är inget fel på Mr. Cool",
        ]
        # fmt: off
        expected_encoding = {"input_ids": [[63423, 5, 6811, 14954, 282, 816, 3821, 63466, 63425, 63462, 18, 63978, 678, 301, 1320, 63423, 63455, 63458, 18, 63982, 4246, 3940, 1901, 47789, 5547, 18994], [19630, 1100, 63446, 1342, 633, 544, 4488, 593, 5102, 2416, 63495, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1652, 428, 268, 1936, 515, 268, 58593, 22413, 9106, 546, 268, 33213, 63979, 698, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [55130, 63450, 924, 63449, 2249, 4062, 1558, 318, 63504, 21498, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [509, 377, 2827, 2559, 332, 6575, 63443, 26801, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], "token_type_ids": [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], "attention_mask": [[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]}
        # fmt: on
        self.tokenizer_integration_test_util(
            expected_encoding=expected_encoding,
            model_name="AI-Sweden/gpt-sw3-126m",
            sequences=sequences,
        )
701
lowerCAmelCase : str ={ 'Pillow': 'Pillow<10.0.0', 'accelerate': 'accelerate>=0.20.3', 'av': 'av==9.2.0', 'beautifulsoup4': 'beautifulsoup4', 'black': 'black~=23.1', 'codecarbon': 'codecarbon==1.2.0', 'cookiecutter': 'cookiecutter==1.7.3', 'dataclasses': 'dataclasses', 'datasets': 'datasets!=2.5.0', 'decord': 'decord==0.6.0', 'deepspeed': 'deepspeed>=0.9.3', 'diffusers': 'diffusers', 'dill': 'dill<0.3.5', 'evaluate': 'evaluate>=0.2.0', 'fairscale': 'fairscale>0.3', 'faiss-cpu': 'faiss-cpu', 'fastapi': 'fastapi', 'filelock': 'filelock', 'flax': 'flax>=0.4.1,<=0.7.0', 'ftfy': 'ftfy', 'fugashi': 'fugashi>=1.0', 'GitPython': 'GitPython<3.1.19', 'hf-doc-builder': 'hf-doc-builder>=0.3.0', 'huggingface-hub': 'huggingface-hub>=0.14.1,<1.0', 'importlib_metadata': 'importlib_metadata', 'ipadic': 'ipadic>=1.0.0,<2.0', 'isort': 'isort>=5.5.4', 'jax': 'jax>=0.2.8,!=0.3.2,<=0.4.13', 'jaxlib': 'jaxlib>=0.1.65,<=0.4.13', 'jieba': 'jieba', 'kenlm': 'kenlm', 'keras-nlp': 'keras-nlp>=0.3.1', 'librosa': 'librosa', 'nltk': 'nltk', 'natten': 'natten>=0.14.6', 'numpy': 'numpy>=1.17', 'onnxconverter-common': 'onnxconverter-common', 'onnxruntime-tools': 'onnxruntime-tools>=1.4.2', 'onnxruntime': 'onnxruntime>=1.4.0', 'opencv-python': 'opencv-python', 'optuna': 'optuna', 'optax': 'optax>=0.0.8,<=0.1.4', 'packaging': 'packaging>=20.0', 'parameterized': 'parameterized', 'phonemizer': 'phonemizer', 'protobuf': 'protobuf', 'psutil': 'psutil', 'pyyaml': 'pyyaml>=5.1', 'pydantic': 'pydantic<2', 'pytest': 'pytest>=7.2.0', 'pytest-timeout': 'pytest-timeout', 'pytest-xdist': 'pytest-xdist', 'python': 'python>=3.8.0', 'ray[tune]': 'ray[tune]', 'regex': 'regex!=2019.12.17', 'requests': 'requests', 'rhoknp': 'rhoknp>=1.1.0,<1.3.1', 'rjieba': 'rjieba', 'rouge-score': 'rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1', 'ruff': 'ruff>=0.0.241,<=0.0.259', 'sacrebleu': 'sacrebleu>=1.4.12,<2.0.0', 'sacremoses': 'sacremoses', 'safetensors': 'safetensors>=0.3.1', 'sagemaker': 'sagemaker>=2.31.0', 'scikit-learn': 
'scikit-learn', 'sentencepiece': 'sentencepiece>=0.1.91,!=0.1.92', 'sigopt': 'sigopt', 'starlette': 'starlette', 'sudachipy': 'sudachipy>=0.6.6', 'sudachidict_core': 'sudachidict_core>=20220729', 'tensorflow-cpu': 'tensorflow-cpu>=2.6,<2.14', 'tensorflow': 'tensorflow>=2.6,<2.14', 'tensorflow-text': 'tensorflow-text<2.14', 'tf2onnx': 'tf2onnx', 'timeout-decorator': 'timeout-decorator', 'timm': 'timm', 'tokenizers': 'tokenizers>=0.11.1,!=0.11.3,<0.14', 'torch': 'torch>=1.9,!=1.12.0', 'torchaudio': 'torchaudio', 'torchvision': 'torchvision', 'pyctcdecode': 'pyctcdecode>=0.4.0', 'tqdm': 'tqdm>=4.27', 'unidic': 'unidic>=1.0.2', 'unidic_lite': 'unidic_lite>=1.0.7', 'urllib3': 'urllib3<2.0.0', 'uvicorn': 'uvicorn', }
693
0
import unittest

import numpy as np

from transformers.testing_utils import require_torch, require_vision
from transformers.utils import is_torch_available, is_vision_available

from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs


if is_torch_available():
    import torch

if is_vision_available():
    from PIL import Image

    from transformers import LevitImageProcessor


class LevitImageProcessingTester(unittest.TestCase):
    """Holds the configuration used to build a `LevitImageProcessor` under test.

    NOTE(review): restored from an obfuscated version in which this class was
    named `_a` while being referenced below as `LevitImageProcessingTester`,
    and whose parameters had been renamed away from the names the body uses.
    """

    def __init__(
        self,
        parent,
        batch_size=7,
        num_channels=3,
        image_size=18,
        min_resolution=30,
        max_resolution=400,
        do_resize=True,
        size=None,
        do_center_crop=True,
        crop_size=None,
        do_normalize=True,
        image_mean=[0.5, 0.5, 0.5],
        image_std=[0.5, 0.5, 0.5],
    ):
        # Defaults mirror the checkpoint configuration used by the tests below.
        size = size if size is not None else {"shortest_edge": 18}
        crop_size = crop_size if crop_size is not None else {"height": 18, "width": 18}
        self.parent = parent
        self.batch_size = batch_size
        self.num_channels = num_channels
        self.image_size = image_size
        self.min_resolution = min_resolution
        self.max_resolution = max_resolution
        self.do_resize = do_resize
        self.size = size
        self.do_center_crop = do_center_crop
        self.crop_size = crop_size
        self.do_normalize = do_normalize
        self.image_mean = image_mean
        self.image_std = image_std

    def prepare_image_processor_dict(self):
        """Returns the kwargs dict used to instantiate the image processor."""
        return {
            "image_mean": self.image_mean,
            "image_std": self.image_std,
            "do_normalize": self.do_normalize,
            "do_resize": self.do_resize,
            "do_center_crop": self.do_center_crop,
            "size": self.size,
            "crop_size": self.crop_size,
        }


@require_torch
@require_vision
class LevitImageProcessingTest(ImageProcessingSavingTestMixin, unittest.TestCase):
    """Tests `LevitImageProcessor` against PIL, numpy and torch inputs."""

    image_processing_class = LevitImageProcessor if is_vision_available() else None

    def setUp(self):
        self.image_processor_tester = LevitImageProcessingTester(self)

    @property
    def image_processor_dict(self):
        return self.image_processor_tester.prepare_image_processor_dict()

    def test_image_processor_properties(self):
        image_processing = self.image_processing_class(**self.image_processor_dict)
        self.assertTrue(hasattr(image_processing, "image_mean"))
        self.assertTrue(hasattr(image_processing, "image_std"))
        self.assertTrue(hasattr(image_processing, "do_normalize"))
        self.assertTrue(hasattr(image_processing, "do_resize"))
        self.assertTrue(hasattr(image_processing, "do_center_crop"))
        self.assertTrue(hasattr(image_processing, "size"))

    def test_image_processor_from_dict_with_kwargs(self):
        image_processor = self.image_processing_class.from_dict(self.image_processor_dict)
        self.assertEqual(image_processor.size, {"shortest_edge": 18})
        self.assertEqual(image_processor.crop_size, {"height": 18, "width": 18})

        # Overriding via kwargs must win over the dict values.
        image_processor = self.image_processing_class.from_dict(self.image_processor_dict, size=42, crop_size=84)
        self.assertEqual(image_processor.size, {"shortest_edge": 42})
        self.assertEqual(image_processor.crop_size, {"height": 84, "width": 84})

    def test_batch_feature(self):
        pass

    def test_call_pil(self):
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random PIL images
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False)
        for image in image_inputs:
            self.assertIsInstance(image, Image.Image)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
        self.assertEqual(
            encoded_images.shape,
            (
                1,
                self.image_processor_tester.num_channels,
                self.image_processor_tester.crop_size["height"],
                self.image_processor_tester.crop_size["width"],
            ),
        )

        # Test batched
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                self.image_processor_tester.crop_size["height"],
                self.image_processor_tester.crop_size["width"],
            ),
        )

    def test_call_numpy(self):
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random numpy tensors
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False, numpify=True)
        for image in image_inputs:
            self.assertIsInstance(image, np.ndarray)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
        self.assertEqual(
            encoded_images.shape,
            (
                1,
                self.image_processor_tester.num_channels,
                self.image_processor_tester.crop_size["height"],
                self.image_processor_tester.crop_size["width"],
            ),
        )

        # Test batched
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                self.image_processor_tester.crop_size["height"],
                self.image_processor_tester.crop_size["width"],
            ),
        )

    def test_call_pytorch(self):
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random PyTorch tensors
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False, torchify=True)
        for image in image_inputs:
            self.assertIsInstance(image, torch.Tensor)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values
        self.assertEqual(
            encoded_images.shape,
            (
                1,
                self.image_processor_tester.num_channels,
                self.image_processor_tester.crop_size["height"],
                self.image_processor_tester.crop_size["width"],
            ),
        )

        # Test batched
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                self.image_processor_tester.crop_size["height"],
                self.image_processor_tester.crop_size["width"],
            ),
        )
702
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_flax_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)


# Base import structure; backend-specific entries are appended below only when
# the corresponding optional dependency is installed.
_import_structure = {
    "configuration_roformer": ["ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP", "RoFormerConfig", "RoFormerOnnxConfig"],
    "tokenization_roformer": ["RoFormerTokenizer"],
}

try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["tokenization_roformer_fast"] = ["RoFormerTokenizerFast"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_roformer"] = [
        "ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "RoFormerForCausalLM",
        "RoFormerForMaskedLM",
        "RoFormerForMultipleChoice",
        "RoFormerForQuestionAnswering",
        "RoFormerForSequenceClassification",
        "RoFormerForTokenClassification",
        "RoFormerLayer",
        "RoFormerModel",
        "RoFormerPreTrainedModel",
        "load_tf_weights_in_roformer",
    ]

try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_tf_roformer"] = [
        "TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "TFRoFormerForCausalLM",
        "TFRoFormerForMaskedLM",
        "TFRoFormerForMultipleChoice",
        "TFRoFormerForQuestionAnswering",
        "TFRoFormerForSequenceClassification",
        "TFRoFormerForTokenClassification",
        "TFRoFormerLayer",
        "TFRoFormerModel",
        "TFRoFormerPreTrainedModel",
    ]

try:
    if not is_flax_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_flax_roformer"] = [
        "FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "FlaxRoFormerForMaskedLM",
        "FlaxRoFormerForMultipleChoice",
        "FlaxRoFormerForQuestionAnswering",
        "FlaxRoFormerForSequenceClassification",
        "FlaxRoFormerForTokenClassification",
        "FlaxRoFormerModel",
        "FlaxRoFormerPreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, RoFormerConfig, RoFormerOnnxConfig
    from .tokenization_roformer import RoFormerTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_roformer_fast import RoFormerTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_roformer import (
            ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            RoFormerForCausalLM,
            RoFormerForMaskedLM,
            RoFormerForMultipleChoice,
            RoFormerForQuestionAnswering,
            RoFormerForSequenceClassification,
            RoFormerForTokenClassification,
            RoFormerLayer,
            RoFormerModel,
            RoFormerPreTrainedModel,
            load_tf_weights_in_roformer,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_roformer import (
            TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFRoFormerForCausalLM,
            TFRoFormerForMaskedLM,
            TFRoFormerForMultipleChoice,
            TFRoFormerForQuestionAnswering,
            TFRoFormerForSequenceClassification,
            TFRoFormerForTokenClassification,
            TFRoFormerLayer,
            TFRoFormerModel,
            TFRoFormerPreTrainedModel,
        )

    try:
        if not is_flax_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_flax_roformer import (
            FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            FlaxRoFormerForMaskedLM,
            FlaxRoFormerForMultipleChoice,
            FlaxRoFormerForQuestionAnswering,
            FlaxRoFormerForSequenceClassification,
            FlaxRoFormerForTokenClassification,
            FlaxRoFormerModel,
            FlaxRoFormerPreTrainedModel,
        )

else:
    import sys

    # Replace this module with a lazy proxy so heavy backends are only
    # imported when one of their symbols is actually accessed.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
693
0
from collections.abc import Generator


def fibonacci_generator() -> Generator[int, None, None]:
    """Yield Fibonacci numbers starting from F(2): 1, 2, 3, 5, 8, ..."""
    a, b = 0, 1
    while True:
        a, b = b, a + b
        yield b


def solution(n: int = 1000) -> int:
    """Return the index of the first Fibonacci number with `n` digits.

    Project Euler problem 25: e.g. the first term with 3 digits is
    F(12) = 144, so solution(3) == 12.
    """
    answer = 1
    gen = fibonacci_generator()
    # Count terms strictly shorter than n digits; the generator starts at
    # F(2), and F(1) is accounted for by the final `+ 1`.
    while len(str(next(gen))) < n:
        answer += 1
    return answer + 1


if __name__ == "__main__":
    print(solution(int(str(input()).strip())))
703
def nor_gate(input_1: int, input_2: int) -> int:
    """Return 1 only when both inputs are 0 (logical NOR).

    >>> nor_gate(0, 0)
    1
    >>> nor_gate(1, 0)
    0
    >>> nor_gate(0, 1)
    0
    >>> nor_gate(1, 1)
    0
    """
    # Fixed: the obfuscated version compared a parameter with itself
    # (`input_a == input_a == 0`), making the second input irrelevant.
    return int(input_1 == input_2 == 0)


def main() -> None:
    """Print the NOR gate truth table."""
    print("Truth Table of NOR Gate:")
    print("| Input 1 | Input 2 | Output |")
    print(f"| 0 | 0 | {nor_gate(0, 0)} |")
    print(f"| 0 | 1 | {nor_gate(0, 1)} |")
    print(f"| 1 | 0 | {nor_gate(1, 0)} |")
    print(f"| 1 | 1 | {nor_gate(1, 1)} |")


if __name__ == "__main__":
    import doctest

    doctest.testmod()
    main()
693
0
from ...processing_utils import ProcessorMixin


class _a(ProcessorMixin):
    """TVLT processor wrapping an image processor and an audio feature extractor
    into a single callable.

    NOTE(review): restored from an obfuscated version whose base class and
    local variable names had been mangled into undefined identifiers.
    """

    attributes = ["image_processor", "feature_extractor"]
    image_processor_class = "TvltImageProcessor"
    feature_extractor_class = "TvltFeatureExtractor"

    def __init__(self, image_processor, feature_extractor) -> None:
        super().__init__(image_processor=image_processor, feature_extractor=feature_extractor)
        self.image_processor = image_processor
        self.feature_extractor = feature_extractor

    def __call__(
        self,
        images=None,
        audio=None,
        images_mixed=None,
        sampling_rate=None,
        mask_audio=False,
        mask_pixel=False,
        *args,
        **kwargs,
    ) -> dict:
        """Forward `images` to the image processor and `audio` to the feature
        extractor, merging their outputs into one dictionary.

        Raises:
            ValueError: if neither `images` nor `audio` is provided.
        """
        if images is None and audio is None:
            raise ValueError("You need to specify either an `images` or `audio` input to process.")

        images_mixed_dict = None
        if images is not None:
            images_dict = self.image_processor(images, mask_pixel=mask_pixel, *args, **kwargs)
        if images_mixed is not None:
            images_mixed_dict = self.image_processor(images_mixed, is_mixed=True, *args, **kwargs)
        if audio is not None:
            audio_dict = self.feature_extractor(
                audio, *args, sampling_rate=sampling_rate, mask_audio=mask_audio, **kwargs
            )

        output_dict = {}
        if audio is not None:
            output_dict.update(audio_dict)
        if images is not None:
            output_dict.update(images_dict)
        if images_mixed_dict is not None:
            output_dict.update(images_mixed_dict)
        return output_dict

    @property
    def model_input_names(self):
        """Union (order-preserving, de-duplicated) of both components' input names."""
        image_processor_input_names = self.image_processor.model_input_names
        feature_extractor_input_names = self.feature_extractor.model_input_names
        return list(dict.fromkeys(image_processor_input_names + feature_extractor_input_names))
704
from typing import TYPE_CHECKING

from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available


# Base import structure; vision/torch entries are appended only when the
# corresponding optional dependency is installed.
_import_structure = {
    "configuration_poolformer": [
        "POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP",
        "PoolFormerConfig",
        "PoolFormerOnnxConfig",
    ]
}

try:
    if not is_vision_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["feature_extraction_poolformer"] = ["PoolFormerFeatureExtractor"]
    _import_structure["image_processing_poolformer"] = ["PoolFormerImageProcessor"]

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure["modeling_poolformer"] = [
        "POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST",
        "PoolFormerForImageClassification",
        "PoolFormerModel",
        "PoolFormerPreTrainedModel",
    ]


if TYPE_CHECKING:
    from .configuration_poolformer import (
        POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP,
        PoolFormerConfig,
        PoolFormerOnnxConfig,
    )

    try:
        if not is_vision_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .feature_extraction_poolformer import PoolFormerFeatureExtractor
        from .image_processing_poolformer import PoolFormerImageProcessor

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_poolformer import (
            POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            PoolFormerForImageClassification,
            PoolFormerModel,
            PoolFormerPreTrainedModel,
        )

else:
    import sys

    # Replace this module with a lazy proxy so heavy backends are only
    # imported when one of their symbols is actually accessed.
    sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure)
693
0
import builtins
import sys

from ...utils.imports import _is_package_available
from . import cursor, input
from .helpers import Direction, clear_line, forceWrite, linebreak, move_cursor, reset_cursor, writeColor
from .keymap import KEYMAP


# Whether we are running inside Google Colab (keyboard navigation is not
# available there, so `run` falls back to typed numeric input).
in_colab = False

try:
    in_colab = _is_package_available("google.colab")
except ModuleNotFoundError:
    pass


@input.register
class _a:
    """A CLI bullet menu: navigate a list of choices with arrow/number keys
    and confirm with enter.

    NOTE(review): restored from an obfuscated version in which `in_colab`,
    method names and most local variables referenced undefined identifiers.
    """

    def __init__(self, prompt=None, choices=[]):
        self.position = 0
        self.choices = choices
        self.prompt = prompt
        if sys.platform == "win32":
            self.arrow_char = "*"
        else:
            self.arrow_char = "➔ "

    def write_choice(self, index, end: str = ""):
        """Write the choice at `index`, colored green on non-Windows terminals."""
        if sys.platform != "win32":
            writeColor(self.choices[index], 32, end)
        else:
            forceWrite(self.choices[index], end)

    def print_choice(self, index):
        """Print one choice line, prefixing the arrow if it is selected."""
        if index == self.position:
            forceWrite(f" {self.arrow_char} ")
            self.write_choice(index)
        else:
            forceWrite(f"    {self.choices[index]}")
        reset_cursor()

    def move_direction(self, direction, num_spaces: int = 1):
        """Move the selection up or down by `num_spaces`, redrawing both rows."""
        old_position = self.position
        if direction == Direction.DOWN:
            if self.position + 1 >= len(self.choices):
                return
            self.position += num_spaces
        else:
            if self.position - 1 < 0:
                return
            self.position -= num_spaces
        clear_line()
        self.print_choice(old_position)
        move_cursor(num_spaces, direction.name)
        self.print_choice(self.position)

    @input.mark(KEYMAP["up"])
    def move_up(self):
        self.move_direction(Direction.UP)

    @input.mark(KEYMAP["down"])
    def move_down(self):
        self.move_direction(Direction.DOWN)

    @input.mark(KEYMAP["newline"])
    def select(self):
        move_cursor(len(self.choices) - self.position, "DOWN")
        return self.position

    @input.mark(KEYMAP["interrupt"])
    def interrupt(self):
        move_cursor(len(self.choices) - self.position, "DOWN")
        raise KeyboardInterrupt

    @input.mark_multiple(*[KEYMAP[str(number)] for number in range(10)])
    def select_row(self):
        """Jump directly to the row whose digit key was pressed.

        `self.current_selection` is set by the `input` dispatch machinery —
        presumably the raw key code of the pressed digit (TODO confirm).
        """
        index = int(chr(self.current_selection))
        movement = index - self.position
        if index == self.position:
            return
        if index < len(self.choices):
            if self.position > index:
                self.move_direction(Direction.UP, -movement)
            elif self.position < index:
                self.move_direction(Direction.DOWN, movement)
            else:
                return
        else:
            return

    def run(self, default_choice: int = 0):
        """Render the menu, loop on input, and return the selected index."""
        if self.prompt:
            linebreak()
            forceWrite(self.prompt, "\n")
            if in_colab:
                forceWrite("Please input a choice index (starting from 0), and press enter", "\n")
            else:
                forceWrite("Please select a choice using the arrow or number keys, and selecting with enter", "\n")
        self.position = default_choice
        for i in range(len(self.choices)):
            self.print_choice(i)
            forceWrite("\n")
        move_cursor(len(self.choices) - self.position, "UP")
        with cursor.hide():
            while True:
                if in_colab:
                    try:
                        choice = int(builtins.input())
                    except ValueError:
                        choice = default_choice
                else:
                    choice = self.handle_input()
                if choice is not None:
                    reset_cursor()
                    # Erase the rendered menu before echoing the final choice.
                    for _ in range(len(self.choices) + 1):
                        move_cursor(1, "UP")
                        clear_line()
                    self.write_choice(choice, "\n")
                    return choice
705
import os
import string
import sys


# Flag OR'ed into arrow key codes to distinguish them from printable chars.
ARROW_KEY_FLAG = 1 << 8

KEYMAP = {
    "tab": ord("\t"),
    "newline": ord("\r"),
    "esc": 27,
    "up": 65 + ARROW_KEY_FLAG,
    "down": 66 + ARROW_KEY_FLAG,
    "right": 67 + ARROW_KEY_FLAG,
    "left": 68 + ARROW_KEY_FLAG,
    "mod_int": 91,
    "undefined": sys.maxsize,
    "interrupt": 3,
    "insert": 50,
    "delete": 51,
    "pg_up": 53,
    "pg_down": 54,
}

# Arrow codes form a contiguous range [up..left]; used in get_character().
KEYMAP["arrow_begin"] = KEYMAP["up"]
KEYMAP["arrow_end"] = KEYMAP["left"]

if sys.platform == "win32":
    # Windows delivers special keys as two-byte sequences; buffer + translate.
    WIN_CH_BUFFER = []
    WIN_KEYMAP = {
        b"\xe0H": KEYMAP["up"] - ARROW_KEY_FLAG,
        b"\x00H": KEYMAP["up"] - ARROW_KEY_FLAG,
        b"\xe0P": KEYMAP["down"] - ARROW_KEY_FLAG,
        b"\x00P": KEYMAP["down"] - ARROW_KEY_FLAG,
        b"\xe0M": KEYMAP["right"] - ARROW_KEY_FLAG,
        b"\x00M": KEYMAP["right"] - ARROW_KEY_FLAG,
        b"\xe0K": KEYMAP["left"] - ARROW_KEY_FLAG,
        b"\x00K": KEYMAP["left"] - ARROW_KEY_FLAG,
    }

for i in range(10):
    KEYMAP[str(i)] = ord(str(i))


def get_raw_chars():
    """Read one raw character (or buffered Windows key part) from stdin."""
    if os.name == "nt":
        import msvcrt

        encoding = "mbcs"
        # Flush the keyboard buffer
        while msvcrt.kbhit():
            msvcrt.getch()
        if len(WIN_CH_BUFFER) == 0:
            # Read the keystroke
            ch = msvcrt.getch()
            # If it is a prefix char, get second part
            if ch in (b"\x00", b"\xe0"):
                ch2 = ch + msvcrt.getch()
                # Translate actual Win chars to bullet char types
                try:
                    chx = chr(WIN_KEYMAP[ch2])
                    WIN_CH_BUFFER.append(chr(KEYMAP["mod_int"]))
                    WIN_CH_BUFFER.append(chx)
                    if ord(chx) in (
                        KEYMAP["insert"] - 1 << 9,
                        KEYMAP["delete"] - 1 << 9,
                        KEYMAP["pg_up"] - 1 << 9,
                        KEYMAP["pg_down"] - 1 << 9,
                    ):
                        WIN_CH_BUFFER.append(chr(126))
                    ch = chr(KEYMAP["esc"])
                except KeyError:
                    ch = ch2[1]
            else:
                ch = ch.decode(encoding)
        else:
            ch = WIN_CH_BUFFER.pop(0)
    elif os.name == "posix":
        import termios
        import tty

        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            # Raw mode so single keypresses are delivered without Enter.
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch


def get_character():
    """Read a keypress and translate escape sequences into KEYMAP codes."""
    char = get_raw_chars()
    if ord(char) in [KEYMAP["interrupt"], KEYMAP["newline"]]:
        return char
    elif ord(char) == KEYMAP["esc"]:
        combo = get_raw_chars()
        if ord(combo) == KEYMAP["mod_int"]:
            key = get_raw_chars()
            if ord(key) >= KEYMAP["arrow_begin"] - ARROW_KEY_FLAG and ord(key) <= KEYMAP["arrow_end"] - ARROW_KEY_FLAG:
                return chr(ord(key) + ARROW_KEY_FLAG)
            else:
                return KEYMAP["undefined"]
        else:
            return get_raw_chars()
    else:
        if char in string.printable:
            return char
        else:
            return KEYMAP["undefined"]
693
0
from typing import Dict, List

from nltk.translate import gleu_score

import datasets
from datasets import MetricInfo


_CITATION = """\
@misc{wu2016googles,
    title={Google's Neural Machine Translation System: Bridging the Gap between Human and Machine Translation},
    author={Yonghui Wu and Mike Schuster and Zhifeng Chen and Quoc V. Le and Mohammad Norouzi and Wolfgang Macherey
            and Maxim Krikun and Yuan Cao and Qin Gao and Klaus Macherey and Jeff Klingner and Apurva Shah and Melvin
            Johnson and Xiaobing Liu and Łukasz Kaiser and Stephan Gouws and Yoshikiyo Kato and Taku Kudo and Hideto
            Kazawa and Keith Stevens and George Kurian and Nishant Patil and Wei Wang and Cliff Young and
            Jason Smith and Jason Riesa and Alex Rudnick and Oriol Vinyals and Greg Corrado and Macduff Hughes
            and Jeffrey Dean},
    year={2016},
    eprint={1609.08144},
    archivePrefix={arXiv},
    primaryClass={cs.CL}
}
"""

_DESCRIPTION = """\
The BLEU score has some undesirable properties when used for single
sentences, as it was designed to be a corpus measure. We therefore
use a slightly different score for our RL experiments which we call
the 'GLEU score'. For the GLEU score, we record all sub-sequences of
1, 2, 3 or 4 tokens in output and target sequence (n-grams). We then
compute a recall, which is the ratio of the number of matching n-grams
to the number of total n-grams in the target (ground truth) sequence,
and a precision, which is the ratio of the number of matching n-grams
to the number of total n-grams in the generated output sequence. Then
GLEU score is simply the minimum of recall and precision. This GLEU
score's range is always between 0 (no matches) and 1 (all match) and
it is symmetrical when switching output and target. According to
our experiments, GLEU score correlates quite well with the BLEU
metric on a corpus level but does not have its drawbacks for our per
sentence reward objective.
"""

_KWARGS_DESCRIPTION = """\
Computes corpus-level Google BLEU (GLEU) score of translated segments against one or more references.
Instead of averaging the sentence level GLEU scores (i.e. macro-average precision), Wu et al. (2016) sum up the matching
tokens and the max of hypothesis and reference tokens for each sentence, then compute using the aggregate values.

Args:
    predictions (list of str): list of translations to score.
        Each translation should be tokenized into a list of tokens.
    references (list of list of str): list of lists of references for each translation.
        Each reference should be tokenized into a list of tokens.
    min_len (int): The minimum order of n-gram this function should extract. Defaults to 1.
    max_len (int): The maximum order of n-gram this function should extract. Defaults to 4.

Returns:
    'google_bleu': google_bleu score

Examples:
    Example 1:
        >>> hyp1 = ['It', 'is', 'a', 'guide', 'to', 'action', 'which',
        ...         'ensures', 'that', 'the', 'rubber', 'duck', 'always',
        ...         'disobeys', 'the', 'commands', 'of', 'the', 'cat']
        >>> ref1a = ['It', 'is', 'the', 'guiding', 'principle', 'which',
        ...          'guarantees', 'the', 'rubber', 'duck', 'forces', 'never',
        ...          'being', 'under', 'the', 'command', 'of', 'the', 'cat']

        >>> hyp2 = ['he', 'read', 'the', 'book', 'because', 'he', 'was',
        ...         'interested', 'in', 'world', 'history']
        >>> ref2a = ['he', 'was', 'interested', 'in', 'world', 'history',
        ...          'because', 'he', 'read', 'the', 'book']

        >>> list_of_references = [[ref1a], [ref2a]]
        >>> hypotheses = [hyp1, hyp2]
        >>> google_bleu = datasets.load_metric("google_bleu")
        >>> results = google_bleu.compute(predictions=hypotheses, references=list_of_references)
        >>> print(round(results["google_bleu"], 2))
        0.44

    Example 2:
        >>> hyp1 = ['It', 'is', 'a', 'guide', 'to', 'action', 'which',
        ...         'ensures', 'that', 'the', 'rubber', 'duck', 'always',
        ...         'disobeys', 'the', 'commands', 'of', 'the', 'cat']
        >>> ref1a = ['It', 'is', 'the', 'guiding', 'principle', 'which',
        ...          'guarantees', 'the', 'rubber', 'duck', 'forces', 'never',
        ...          'being', 'under', 'the', 'command', 'of', 'the', 'cat']
        >>> ref1b = ['It', 'is', 'a', 'guide', 'to', 'action', 'that',
        ...          'ensures', 'that', 'the', 'rubber', 'duck', 'will', 'never',
        ...          'heed', 'the', 'cat', 'commands']
        >>> ref1c = ['It', 'is', 'the', 'practical', 'guide', 'for', 'the',
        ...          'rubber', 'duck', 'army', 'never', 'to', 'heed', 'the', 'directions',
        ...          'of', 'the', 'cat']

        >>> hyp2 = ['he', 'read', 'the', 'book', 'because', 'he', 'was',
        ...         'interested', 'in', 'world', 'history']
        >>> ref2a = ['he', 'was', 'interested', 'in', 'world', 'history',
        ...          'because', 'he', 'read', 'the', 'book']

        >>> list_of_references = [[ref1a, ref1b, ref1c], [ref2a]]
        >>> hypotheses = [hyp1, hyp2]
        >>> google_bleu = datasets.load_metric("google_bleu")
        >>> results = google_bleu.compute(predictions=hypotheses, references=list_of_references)
        >>> print(round(results["google_bleu"], 2))
        0.61

    Example 3:
        >>> hyp1 = ['It', 'is', 'a', 'guide', 'to', 'action', 'which',
        ...         'ensures', 'that', 'the', 'rubber', 'duck', 'always',
        ...         'disobeys', 'the', 'commands', 'of', 'the', 'cat']
        >>> ref1a = ['It', 'is', 'the', 'guiding', 'principle', 'which',
        ...          'guarantees', 'the', 'rubber', 'duck', 'forces', 'never',
        ...          'being', 'under', 'the', 'command', 'of', 'the', 'cat']
        >>> ref1b = ['It', 'is', 'a', 'guide', 'to', 'action', 'that',
        ...          'ensures', 'that', 'the', 'rubber', 'duck', 'will', 'never',
        ...          'heed', 'the', 'cat', 'commands']
        >>> ref1c = ['It', 'is', 'the', 'practical', 'guide', 'for', 'the',
        ...          'rubber', 'duck', 'army', 'never', 'to', 'heed', 'the', 'directions',
        ...          'of', 'the', 'cat']

        >>> hyp2 = ['he', 'read', 'the', 'book', 'because', 'he', 'was',
        ...         'interested', 'in', 'world', 'history']
        >>> ref2a = ['he', 'was', 'interested', 'in', 'world', 'history',
        ...          'because', 'he', 'read', 'the', 'book']

        >>> list_of_references = [[ref1a, ref1b, ref1c], [ref2a]]
        >>> hypotheses = [hyp1, hyp2]
        >>> google_bleu = datasets.load_metric("google_bleu")
        >>> results = google_bleu.compute(predictions=hypotheses, references=list_of_references, min_len=2)
        >>> print(round(results["google_bleu"], 2))
        0.53

    Example 4:
        >>> hyp1 = ['It', 'is', 'a', 'guide', 'to', 'action', 'which',
        ...         'ensures', 'that', 'the', 'rubber', 'duck', 'always',
        ...         'disobeys', 'the', 'commands', 'of', 'the', 'cat']
        >>> ref1a = ['It', 'is', 'the', 'guiding', 'principle', 'which',
        ...          'guarantees', 'the', 'rubber', 'duck', 'forces', 'never',
        ...          'being', 'under', 'the', 'command', 'of', 'the', 'cat']
        >>> ref1b = ['It', 'is', 'a', 'guide', 'to', 'action', 'that',
        ...          'ensures', 'that', 'the', 'rubber', 'duck', 'will', 'never',
        ...          'heed', 'the', 'cat', 'commands']
        >>> ref1c = ['It', 'is', 'the', 'practical', 'guide', 'for', 'the',
        ...          'rubber', 'duck', 'army', 'never', 'to', 'heed', 'the', 'directions',
        ...          'of', 'the', 'cat']

        >>> hyp2 = ['he', 'read', 'the', 'book', 'because', 'he', 'was',
        ...         'interested', 'in', 'world', 'history']
        >>> ref2a = ['he', 'was', 'interested', 'in', 'world', 'history',
        ...          'because', 'he', 'read', 'the', 'book']

        >>> list_of_references = [[ref1a, ref1b, ref1c], [ref2a]]
        >>> hypotheses = [hyp1, hyp2]
        >>> google_bleu = datasets.load_metric("google_bleu")
        >>> results = google_bleu.compute(predictions=hypotheses,references=list_of_references, min_len=2, max_len=6)
        >>> print(round(results["google_bleu"], 2))
        0.4
"""


@datasets.utils.file_utils.add_start_docstrings(_DESCRIPTION, _KWARGS_DESCRIPTION)
class _a(datasets.Metric):
    """Google BLEU (GLEU) metric, a thin wrapper around
    `nltk.translate.gleu_score.corpus_gleu`.

    NOTE(review): restored method names `_info`/`_compute` — the `datasets`
    Metric API requires exactly those names, but the obfuscated version had
    both defined as `_snake_case`.
    """

    def _info(self) -> MetricInfo:
        return datasets.MetricInfo(
            description=_DESCRIPTION,
            citation=_CITATION,
            inputs_description=_KWARGS_DESCRIPTION,
            features=datasets.Features(
                {
                    "predictions": datasets.Sequence(datasets.Value("string", id="token"), id="sequence"),
                    "references": datasets.Sequence(
                        datasets.Sequence(datasets.Value("string", id="token"), id="sequence"), id="references"
                    ),
                }
            ),
        )

    def _compute(
        self,
        predictions,
        references,
        min_len: int = 1,
        max_len: int = 4,
    ) -> Dict[str, float]:
        return {
            "google_bleu": gleu_score.corpus_gleu(
                list_of_references=references, hypotheses=predictions, min_len=min_len, max_len=max_len
            )
        }
706
# Imports import numpy as np class _a : def __init__( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> List[Any]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) def _snake_case ( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Union[str, Any]: if red is not None: lowerCAmelCase : str = red if green is not None: lowerCAmelCase : Optional[int] = green if blue is not None: lowerCAmelCase : Optional[int] = blue if red_edge is not None: lowerCAmelCase : Tuple = red_edge if nir is not None: lowerCAmelCase : Union[str, Any] = nir return True def _snake_case ( self , lowercase_="" , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Optional[int]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) lowerCAmelCase : int = { """ARVI2""": self.arvaa, """CCCI""": self.ccci, """CVI""": self.cvi, """GLI""": self.gli, """NDVI""": self.ndvi, """BNDVI""": self.bndvi, """redEdgeNDVI""": self.red_edge_ndvi, """GNDVI""": self.gndvi, """GBNDVI""": self.gbndvi, """GRNDVI""": self.grndvi, """RBNDVI""": self.rbndvi, """PNDVI""": self.pndvi, """ATSAVI""": self.atsavi, """BWDRVI""": self.bwdrvi, """CIgreen""": self.ci_green, """CIrededge""": self.ci_rededge, """CI""": self.ci, """CTVI""": self.ctvi, """GDVI""": self.gdvi, """EVI""": self.evi, """GEMI""": self.gemi, """GOSAVI""": self.gosavi, """GSAVI""": self.gsavi, """Hue""": self.hue, """IVI""": self.ivi, """IPVI""": self.ipvi, """I""": self.i, """RVI""": self.rvi, """MRVI""": self.mrvi, """MSAVI""": self.m_savi, """NormG""": self.norm_g, """NormNIR""": self.norm_nir, """NormR""": self.norm_r, """NGRDI""": self.ngrdi, """RI""": self.ri, """S""": self.s, """IF""": self._if, """DVI""": self.dvi, """TVI""": self.tvi, """NDRE""": self.ndre, } try: return funcs[index]() except 
KeyError: print("""Index not in the list!""" ) return False def _snake_case ( self ) -> Dict: return -0.1_8 + (1.1_7 * ((self.nir - self.red) / (self.nir + self.red))) def _snake_case ( self ) -> Optional[Any]: return ((self.nir - self.redEdge) / (self.nir + self.redEdge)) / ( (self.nir - self.red) / (self.nir + self.red) ) def _snake_case ( self ) -> List[str]: return self.nir * (self.red / (self.green**2)) def _snake_case ( self ) -> Tuple: return (2 * self.green - self.red - self.blue) / ( 2 * self.green + self.red + self.blue ) def _snake_case ( self ) -> Optional[int]: return (self.nir - self.red) / (self.nir + self.red) def _snake_case ( self ) -> List[str]: return (self.nir - self.blue) / (self.nir + self.blue) def _snake_case ( self ) -> int: return (self.redEdge - self.red) / (self.redEdge + self.red) def _snake_case ( self ) -> Optional[Any]: return (self.nir - self.green) / (self.nir + self.green) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.blue)) / ( self.nir + (self.green + self.blue) ) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.red)) / ( self.nir + (self.green + self.red) ) def _snake_case ( self ) -> int: return (self.nir - (self.blue + self.red)) / (self.nir + (self.blue + self.red)) def _snake_case ( self ) -> List[str]: return (self.nir - (self.green + self.red + self.blue)) / ( self.nir + (self.green + self.red + self.blue) ) def _snake_case ( self , lowercase_=0.0_8 , lowercase_=1.2_2 , lowercase_=0.0_3 ) -> int: return a * ( (self.nir - a * self.red - b) / (a * self.nir + self.red - a * b + x * (1 + a**2)) ) def _snake_case ( self ) -> Optional[Any]: return (0.1 * self.nir - self.blue) / (0.1 * self.nir + self.blue) def _snake_case ( self ) -> Any: return (self.nir / self.green) - 1 def _snake_case ( self ) -> List[Any]: return (self.nir / self.redEdge) - 1 def _snake_case ( self ) -> str: return (self.red - self.blue) / self.red def _snake_case ( self ) -> Optional[int]: lowerCAmelCase 
: Dict = self.ndvi() return ((ndvi + 0.5) / (abs(ndvi + 0.5 ))) * (abs(ndvi + 0.5 ) ** (1 / 2)) def _snake_case ( self ) -> Optional[Any]: return self.nir - self.green def _snake_case ( self ) -> int: return 2.5 * ( (self.nir - self.red) / (self.nir + 6 * self.red - 7.5 * self.blue + 1) ) def _snake_case ( self ) -> Optional[Any]: lowerCAmelCase : Tuple = (2 * (self.nir**2 - self.red**2) + 1.5 * self.nir + 0.5 * self.red) / ( self.nir + self.red + 0.5 ) return n * (1 - 0.2_5 * n) - (self.red - 0.1_2_5) / (1 - self.red) def _snake_case ( self , lowercase_=0.1_6 ) -> Optional[int]: return (self.nir - self.green) / (self.nir + self.green + y) def _snake_case ( self , lowercase_=0.5 ) -> List[str]: return ((self.nir - self.green) / (self.nir + self.green + n)) * (1 + n) def _snake_case ( self ) -> Any: return np.arctan( ((2 * self.red - self.green - self.blue) / 3_0.5) * (self.green - self.blue) ) def _snake_case ( self , lowercase_=None , lowercase_=None ) -> List[Any]: return (self.nir - b) / (a * self.red) def _snake_case ( self ) -> Any: return (self.nir / ((self.nir + self.red) / 2)) * (self.ndvi() + 1) def _snake_case ( self ) -> str: return (self.red + self.green + self.blue) / 3_0.5 def _snake_case ( self ) -> Union[str, Any]: return self.nir / self.red def _snake_case ( self ) -> Tuple: return (self.rvi() - 1) / (self.rvi() + 1) def _snake_case ( self ) -> Dict: return ( (2 * self.nir + 1) - ((2 * self.nir + 1) ** 2 - 8 * (self.nir - self.red)) ** (1 / 2) ) / 2 def _snake_case ( self ) -> List[Any]: return self.green / (self.nir + self.red + self.green) def _snake_case ( self ) -> int: return self.nir / (self.nir + self.red + self.green) def _snake_case ( self ) -> Dict: return self.red / (self.nir + self.red + self.green) def _snake_case ( self ) -> List[Any]: return (self.green - self.red) / (self.green + self.red) def _snake_case ( self ) -> Optional[int]: return (self.red - self.green) / (self.red + self.green) def _snake_case ( self ) -> Tuple: 
lowerCAmelCase : Any = np.max([np.max(self.red ), np.max(self.green ), np.max(self.blue )] ) lowerCAmelCase : Dict = np.min([np.min(self.red ), np.min(self.green ), np.min(self.blue )] ) return (max_value - min_value) / max_value def _snake_case ( self ) -> int: return (2 * self.red - self.green - self.blue) / (self.green - self.blue) def _snake_case ( self ) -> List[str]: return self.nir / self.red def _snake_case ( self ) -> int: return (self.ndvi() + 0.5) ** (1 / 2) def _snake_case ( self ) -> str: return (self.nir - self.redEdge) / (self.nir + self.redEdge)
693
0
import argparse
import os
import shutil

import torch

from emmental.modules import MagnitudeBinarizer, ThresholdBinarizer, TopKBinarizer


def main(args):
    """Turn a fine-pruned checkpoint into a standard dense checkpoint.

    Loads ``pytorch_model.bin`` from ``args.model_name_or_path``, applies the
    binarizer matching ``args.pruning_method`` to every prunable weight
    (embeddings, LayerNorm, pooler, classifier/qa heads and biases are copied
    verbatim), and saves the resulting state dict under
    ``args.target_model_path`` (or a sibling ``bertarized_*`` folder when no
    target is given).

    Args:
        args: parsed CLI namespace with ``pruning_method``, ``threshold``,
            ``model_name_or_path`` and ``target_model_path`` attributes.

    Raises:
        ValueError: if ``args.pruning_method`` is not one of the known methods.
    """
    pruning_method = args.pruning_method
    threshold = args.threshold

    model_name_or_path = args.model_name_or_path.rstrip("/")
    target_model_path = args.target_model_path

    print(f"Load fine-pruned model from {model_name_or_path}")
    model = torch.load(os.path.join(model_name_or_path, "pytorch_model.bin"))
    pruned_model = {}

    for name, tensor in model.items():
        if "embeddings" in name or "LayerNorm" in name or "pooler" in name:
            # These modules are never pruned; keep them as-is.
            pruned_model[name] = tensor
            print(f"Copied layer {name}")
        elif "classifier" in name or "qa_output" in name:
            pruned_model[name] = tensor
            print(f"Copied layer {name}")
        elif "bias" in name:
            pruned_model[name] = tensor
            print(f"Copied layer {name}")
        else:
            if pruning_method == "magnitude":
                mask = MagnitudeBinarizer.apply(inputs=tensor, threshold=threshold)
                pruned_model[name] = tensor * mask
                print(f"Pruned layer {name}")
            elif pruning_method == "topK":
                if "mask_scores" in name:
                    # The scores themselves are consumed below, not saved.
                    continue
                prefix_ = name[:-6]  # strip the "weight" suffix
                scores = model[f"{prefix_}mask_scores"]
                mask = TopKBinarizer.apply(scores, threshold)
                pruned_model[name] = tensor * mask
                print(f"Pruned layer {name}")
            elif pruning_method == "sigmoied_threshold":
                if "mask_scores" in name:
                    continue
                prefix_ = name[:-6]
                scores = model[f"{prefix_}mask_scores"]
                mask = ThresholdBinarizer.apply(scores, threshold, True)
                pruned_model[name] = tensor * mask
                print(f"Pruned layer {name}")
            elif pruning_method == "l0":
                if "mask_scores" in name:
                    continue
                prefix_ = name[:-6]
                scores = model[f"{prefix_}mask_scores"]
                # Hard-concrete stretch interval used during L0 training.
                l, r = -0.1, 1.1
                s = torch.sigmoid(scores)
                s_bar = s * (r - l) + l
                mask = s_bar.clamp(min=0.0, max=1.0)
                pruned_model[name] = tensor * mask
                print(f"Pruned layer {name}")
            else:
                raise ValueError("Unknown pruning method")

    if target_model_path is None:
        target_model_path = os.path.join(
            os.path.dirname(model_name_or_path), f"bertarized_{os.path.basename(model_name_or_path)}"
        )

    if not os.path.isdir(target_model_path):
        # Copy configs/tokenizer files alongside the pruned weights.
        shutil.copytree(model_name_or_path, target_model_path)
        print(f"\nCreated folder {target_model_path}")

    torch.save(pruned_model, os.path.join(target_model_path, "pytorch_model.bin"))
    print("\nPruned model saved! See you later!")


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--pruning_method",
        choices=["l0", "magnitude", "topK", "sigmoied_threshold"],
        type=str,
        required=True,
        help=(
            "Pruning Method (l0 = L0 regularization, magnitude = Magnitude pruning, topK = Movement pruning,"
            " sigmoied_threshold = Soft movement pruning)"
        ),
    )
    parser.add_argument(
        "--threshold",
        type=float,
        required=False,
        help=(
            "For `magnitude` and `topK`, it is the level of remaining weights (in %) in the fine-pruned model."
            "For `sigmoied_threshold`, it is the threshold \tau against which the (sigmoied) scores are compared."
            "Not needed for `l0`"
        ),
    )
    parser.add_argument(
        "--model_name_or_path",
        type=str,
        required=True,
        help="Folder containing the model that was previously fine-pruned",
    )
    parser.add_argument(
        "--target_model_path",
        default=None,
        type=str,
        required=False,
        help="Folder containing the model that was previously fine-pruned",
    )
    args = parser.parse_args()

    main(args)
707
import argparse
import json
import math
import os
import time
import traceback
import zipfile
from collections import Counter

import requests


def get_job_links(workflow_run_id, token=None):
    """Return {job name: job html url} for every job of a workflow run.

    Pages through the GitHub API 100 jobs at a time; returns {} on any error.
    """
    headers = None
    if token is not None:
        headers = {"Accept": "application/vnd.github+json", "Authorization": f"Bearer {token}"}

    url = f"https://api.github.com/repos/huggingface/transformers/actions/runs/{workflow_run_id}/jobs?per_page=100"
    result = requests.get(url, headers=headers).json()
    job_links = {}

    try:
        job_links.update({job["name"]: job["html_url"] for job in result["jobs"]})
        # First page already fetched; compute how many extra pages remain.
        pages_to_iterate_over = math.ceil((result["total_count"] - 100) / 100)

        for i in range(pages_to_iterate_over):
            result = requests.get(url + f"&page={i + 2}", headers=headers).json()
            job_links.update({job["name"]: job["html_url"] for job in result["jobs"]})

        return job_links
    except Exception:
        print(f"Unknown error, could not fetch links:\n{traceback.format_exc()}")

    return {}


def get_artifacts_links(workflow_run_id, token=None):
    """Return {artifact name: archive download url} for a workflow run.

    Pages through the GitHub API 100 artifacts at a time; returns {} on error.
    """
    headers = None
    if token is not None:
        headers = {"Accept": "application/vnd.github+json", "Authorization": f"Bearer {token}"}

    url = f"https://api.github.com/repos/huggingface/transformers/actions/runs/{workflow_run_id}/artifacts?per_page=100"
    result = requests.get(url, headers=headers).json()
    artifacts = {}

    try:
        artifacts.update({artifact["name"]: artifact["archive_download_url"] for artifact in result["artifacts"]})
        pages_to_iterate_over = math.ceil((result["total_count"] - 100) / 100)

        for i in range(pages_to_iterate_over):
            result = requests.get(url + f"&page={i + 2}", headers=headers).json()
            artifacts.update({artifact["name"]: artifact["archive_download_url"] for artifact in result["artifacts"]})

        return artifacts
    except Exception:
        print(f"Unknown error, could not fetch links:\n{traceback.format_exc()}")

    return {}


def download_artifact(artifact_name, artifact_url, output_dir, token):
    """Download a CI artifact zip to ``output_dir/<artifact_name>.zip``.

    The artifact URL redirects to a pre-signed blob URL; the redirect target
    must be fetched WITHOUT the auth header, hence the two-step request.
    """
    headers = None
    if token is not None:
        headers = {"Accept": "application/vnd.github+json", "Authorization": f"Bearer {token}"}

    result = requests.get(artifact_url, headers=headers, allow_redirects=False)
    download_url = result.headers["Location"]
    response = requests.get(download_url, allow_redirects=True)
    file_path = os.path.join(output_dir, f"{artifact_name}.zip")
    with open(file_path, "wb") as fp:
        fp.write(response.content)


def get_errors_from_single_artifact(artifact_zip_path, job_links=None):
    """Extract (error line, error, failed test, job link) rows from one artifact zip.

    Reads ``failures_line.txt``, ``summary_short.txt`` and ``job_name.txt``
    inside the zip. Raises ValueError if the error and failed-test counts in
    the report disagree.
    """
    errors = []
    failed_tests = []
    job_name = None

    with zipfile.ZipFile(artifact_zip_path) as z:
        for filename in z.namelist():
            if not os.path.isdir(filename):
                # read the file
                if filename in ["failures_line.txt", "summary_short.txt", "job_name.txt"]:
                    with z.open(filename) as f:
                        for line in f:
                            line = line.decode("UTF-8").strip()
                            if filename == "failures_line.txt":
                                try:
                                    # `error_line` is the place where `error` occurs
                                    error_line = line[: line.index(": ")]
                                    error = line[line.index(": ") + len(": ") :]
                                    errors.append([error_line, error])
                                except Exception:
                                    # skip un-related lines
                                    pass
                            elif filename == "summary_short.txt" and line.startswith("FAILED "):
                                # `test` is the test method that failed
                                test = line[len("FAILED ") :]
                                failed_tests.append(test)
                            elif filename == "job_name.txt":
                                job_name = line

    if len(errors) != len(failed_tests):
        raise ValueError(
            f"`errors` and `failed_tests` should have the same number of elements. Got {len(errors)} for `errors` "
            f"and {len(failed_tests)} for `failed_tests` instead. The test reports in {artifact_zip_path} have some"
            " problem."
        )

    job_link = None
    if job_name and job_links:
        job_link = job_links.get(job_name, None)

    # A list with elements of the form (line of error, error, failed test)
    result = [x + [y] + [job_link] for x, y in zip(errors, failed_tests)]

    return result


def get_all_errors(artifact_dir, job_links=None):
    """Collect error rows from every ``*.zip`` artifact in ``artifact_dir``."""
    errors = []

    paths = [os.path.join(artifact_dir, p) for p in os.listdir(artifact_dir) if p.endswith(".zip")]
    for p in paths:
        errors.extend(get_errors_from_single_artifact(p, job_links=job_links))

    return errors


def reduce_by_error(logs, error_filter=None):
    """Group error rows by error message, sorted by descending count.

    Returns {error: {"count": n, "failed_tests": [(test, error_line), ...]}}.
    """
    counter = Counter()
    counter.update([x[1] for x in logs])
    counts = counter.most_common()
    r = {}
    for error, count in counts:
        if error_filter is None or error not in error_filter:
            r[error] = {"count": count, "failed_tests": [(x[2], x[0]) for x in logs if x[1] == error]}

    r = dict(sorted(r.items(), key=lambda item: item[1]["count"], reverse=True))
    return r


def get_model(test):
    """Return the model name for a test of the form ``tests/models/<model>/...``, else None."""
    test = test.split("::")[0]
    if test.startswith("tests/models/"):
        test = test.split("/")[2]
    else:
        test = None

    return test


def reduce_by_model(logs, error_filter=None):
    """Group error rows by model, sorted by descending total error count.

    Rows whose test is not under ``tests/models/`` are dropped.
    Returns {model: {"count": n, "errors": {error: count}}}.
    """
    logs = [(x[0], x[1], get_model(x[2])) for x in logs]
    logs = [x for x in logs if x[2] is not None]
    tests = {x[2] for x in logs}

    r = {}
    for test in tests:
        counter = Counter()
        # count by errors in `test`
        counter.update([x[1] for x in logs if x[2] == test])
        counts = counter.most_common()
        error_counts = {error: count for error, count in counts if (error_filter is None or error not in error_filter)}
        n_errors = sum(error_counts.values())
        if n_errors > 0:
            r[test] = {"count": n_errors, "errors": error_counts}

    r = dict(sorted(r.items(), key=lambda item: item[1]["count"], reverse=True))
    return r


def make_github_table(reduced_by_error):
    """Render the per-error summary as a GitHub markdown table."""
    header = "| no. | error | status |"
    sep = "|-:|:-|:-|"
    lines = [header, sep]
    for error in reduced_by_error:
        count = reduced_by_error[error]["count"]
        line = f"| {count} | {error[:100]} | |"
        lines.append(line)

    return "\n".join(lines)


def make_github_table_per_model(reduced_by_model):
    """Render the per-model summary as a GitHub markdown table."""
    header = "| model | no. of errors | major error | count |"
    sep = "|-:|-:|-:|-:|"
    lines = [header, sep]
    for model in reduced_by_model:
        count = reduced_by_model[model]["count"]
        error, _count = list(reduced_by_model[model]["errors"].items())[0]
        line = f"| {model} | {count} | {error[:60]} | {_count} |"
        lines.append(line)

    return "\n".join(lines)


if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # Required parameters
    parser.add_argument("--workflow_run_id", type=str, required=True, help="A GitHub Actions workflow run id.")
    parser.add_argument(
        "--output_dir",
        type=str,
        required=True,
        help="Where to store the downloaded artifacts and other result files.",
    )
    parser.add_argument("--token", default=None, type=str, help="A token that has actions:read permission.")
    args = parser.parse_args()

    os.makedirs(args.output_dir, exist_ok=True)

    _job_links = get_job_links(args.workflow_run_id, token=args.token)
    job_links = {}
    # To deal with `workflow_call` event, where a job name is the combination of the job names in the caller and callee.
    # For example, `PyTorch 1.11 / Model tests (models/albert, single-gpu)`.
    if _job_links:
        for k, v in _job_links.items():
            # This is how GitHub actions combine job names.
            if " / " in k:
                index = k.find(" / ")
                k = k[index + len(" / ") :]
            job_links[k] = v
    with open(os.path.join(args.output_dir, "job_links.json"), "w", encoding="UTF-8") as fp:
        json.dump(job_links, fp, ensure_ascii=False, indent=4)

    artifacts = get_artifacts_links(args.workflow_run_id, token=args.token)
    with open(os.path.join(args.output_dir, "artifacts.json"), "w", encoding="UTF-8") as fp:
        json.dump(artifacts, fp, ensure_ascii=False, indent=4)

    for idx, (name, url) in enumerate(artifacts.items()):
        download_artifact(name, url, args.output_dir, args.token)
        # Be gentle to GitHub
        time.sleep(1)

    errors = get_all_errors(args.output_dir, job_links=job_links)

    # `e[1]` is the error
    counter = Counter()
    counter.update([e[1] for e in errors])

    # print the top 30 most common test errors
    most_common = counter.most_common(30)
    for item in most_common:
        print(item)

    with open(os.path.join(args.output_dir, "errors.json"), "w", encoding="UTF-8") as fp:
        json.dump(errors, fp, ensure_ascii=False, indent=4)

    reduced_by_error = reduce_by_error(errors)
    reduced_by_model = reduce_by_model(errors)

    s1 = make_github_table(reduced_by_error)
    s2 = make_github_table_per_model(reduced_by_model)

    with open(os.path.join(args.output_dir, "reduced_by_error.txt"), "w", encoding="UTF-8") as fp:
        fp.write(s1)
    with open(os.path.join(args.output_dir, "reduced_by_model.txt"), "w", encoding="UTF-8") as fp:
        fp.write(s2)
693
0
import unittest

from transformers import TrOCRConfig
from transformers.testing_utils import is_torch_available, require_torch, torch_device

from ...generation.test_utils import GenerationTesterMixin
from ...test_configuration_common import ConfigTester
from ...test_modeling_common import ModelTesterMixin, ids_tensor
from ...test_pipeline_mixin import PipelineTesterMixin


if is_torch_available():
    import torch

    from transformers.models.trocr.modeling_trocr import TrOCRDecoder, TrOCRForCausalLM


@require_torch
class TrOCRStandaloneDecoderModelTester:
    """Builds configs/inputs for the standalone TrOCR decoder tests."""

    def __init__(
        self,
        parent,
        vocab_size=99,
        batch_size=13,
        d_model=16,
        decoder_seq_length=7,
        is_training=True,
        is_decoder=True,
        use_attention_mask=True,
        use_cache=False,
        use_labels=True,
        decoder_start_token_id=2,
        decoder_ffn_dim=32,
        decoder_layers=4,
        decoder_attention_heads=4,
        max_position_embeddings=30,
        pad_token_id=0,
        bos_token_id=1,
        eos_token_id=2,
        scope=None,
    ):
        self.parent = parent
        self.batch_size = batch_size
        self.decoder_seq_length = decoder_seq_length
        # For common tests
        self.seq_length = self.decoder_seq_length
        self.is_training = is_training
        self.use_attention_mask = use_attention_mask
        self.use_labels = use_labels

        self.vocab_size = vocab_size
        self.d_model = d_model
        self.hidden_size = d_model
        self.num_hidden_layers = decoder_layers
        self.decoder_layers = decoder_layers
        self.decoder_ffn_dim = decoder_ffn_dim
        self.decoder_attention_heads = decoder_attention_heads
        self.num_attention_heads = decoder_attention_heads
        self.eos_token_id = eos_token_id
        self.bos_token_id = bos_token_id
        self.pad_token_id = pad_token_id
        self.decoder_start_token_id = decoder_start_token_id
        self.use_cache = use_cache
        self.max_position_embeddings = max_position_embeddings

        self.scope = None
        self.decoder_key_length = decoder_seq_length
        self.base_model_out_len = 2
        self.decoder_attention_idx = 1

    def prepare_config_and_inputs(self):
        """Return (config, input_ids, attention_mask, lm_labels) with random ids."""
        input_ids = ids_tensor([self.batch_size, self.decoder_seq_length], self.vocab_size)

        attention_mask = None
        if self.use_attention_mask:
            attention_mask = ids_tensor([self.batch_size, self.decoder_seq_length], vocab_size=2)

        lm_labels = None
        if self.use_labels:
            lm_labels = ids_tensor([self.batch_size, self.decoder_seq_length], self.vocab_size)

        config = TrOCRConfig(
            vocab_size=self.vocab_size,
            d_model=self.d_model,
            decoder_layers=self.decoder_layers,
            decoder_ffn_dim=self.decoder_ffn_dim,
            decoder_attention_heads=self.decoder_attention_heads,
            eos_token_id=self.eos_token_id,
            bos_token_id=self.bos_token_id,
            use_cache=self.use_cache,
            pad_token_id=self.pad_token_id,
            decoder_start_token_id=self.decoder_start_token_id,
            max_position_embeddings=self.max_position_embeddings,
        )

        return (config, input_ids, attention_mask, lm_labels)

    def create_and_check_decoder_model_past(
        self,
        config,
        input_ids,
        attention_mask,
        lm_labels,
    ):
        """Check that cached (past_key_values) decoding matches a full forward pass."""
        config.use_cache = True
        model = TrOCRDecoder(config=config).to(torch_device).eval()
        input_ids = input_ids[:2]

        # avoid pad tokens so caching and non-caching paths see identical inputs
        input_ids[input_ids == 0] += 1
        # first forward pass
        outputs = model(input_ids, use_cache=True)
        outputs_use_cache_conf = model(input_ids)
        outputs_no_past = model(input_ids, use_cache=False)

        self.parent.assertTrue(len(outputs) == len(outputs_use_cache_conf))
        self.parent.assertTrue(len(outputs) == len(outputs_no_past) + 1)

        past_key_values = outputs["past_key_values"]

        # create hypothetical next token and extent to next_input_ids
        next_tokens = ids_tensor((2, 1), config.vocab_size - 1) + 1

        # append to next input_ids and
        next_input_ids = torch.cat([input_ids, next_tokens], dim=-1)

        output_from_no_past = model(next_input_ids)["last_hidden_state"]
        output_from_past = model(next_tokens, past_key_values=past_key_values)["last_hidden_state"]

        # select random slice
        random_slice_idx = ids_tensor((1,), output_from_past.shape[-1]).item()
        output_from_no_past_slice = output_from_no_past[:, next_input_ids.shape[-1] - 1, random_slice_idx].detach()
        output_from_past_slice = output_from_past[:, 0, random_slice_idx].detach()

        # test that outputs are equal for slice
        assert torch.allclose(output_from_past_slice, output_from_no_past_slice, atol=1e-3)

    def prepare_config_and_inputs_for_common(self):
        """Adapter for the common test mixins: (config, inputs_dict)."""
        config_and_inputs = self.prepare_config_and_inputs()
        config, input_ids, attention_mask, lm_labels = config_and_inputs
        inputs_dict = {"input_ids": input_ids, "attention_mask": attention_mask}
        return config, inputs_dict


@require_torch
class TrOCRStandaloneDecoderModelTest(ModelTesterMixin, GenerationTesterMixin, PipelineTesterMixin, unittest.TestCase):
    all_model_classes = (TrOCRDecoder, TrOCRForCausalLM) if is_torch_available() else ()
    all_generative_model_classes = (TrOCRForCausalLM,) if is_torch_available() else ()
    pipeline_model_mapping = {"text-generation": TrOCRForCausalLM} if is_torch_available() else {}
    fx_compatible = True
    test_pruning = False

    def setUp(self):
        self.model_tester = TrOCRStandaloneDecoderModelTester(self, is_training=False)
        self.config_tester = ConfigTester(self, config_class=TrOCRConfig)

    # not implemented currently
    def test_inputs_embeds(self):
        pass

    def test_save_load_fast_init_from_base(self):
        pass

    def test_save_load_fast_init_to_base(self):
        pass

    def test_config(self):
        self.config_tester.run_common_tests()

    def test_decoder_model_past(self):
        config_and_inputs = self.model_tester.prepare_config_and_inputs()
        self.model_tester.create_and_check_decoder_model_past(*config_and_inputs)

    def test_retain_grad_hidden_states_attentions(self):
        # decoder cannot keep gradients
        return

    @unittest.skip("The model doesn't support left padding")  # and it's not used enough to be worth fixing :)
    def test_left_padding_compatibility(self):
        pass
708
from typing import TYPE_CHECKING # rely on isort to merge the imports from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available lowerCAmelCase : Optional[int] ={ 'configuration_autoformer': [ 'AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'AutoformerConfig', ], } try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : Dict =[ 'AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST', 'AutoformerForPrediction', 'AutoformerModel', 'AutoformerPreTrainedModel', ] if TYPE_CHECKING: from .configuration_autoformer import ( AUTOFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, AutoformerConfig, ) try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_autoformer import ( AUTOFORMER_PRETRAINED_MODEL_ARCHIVE_LIST, AutoformerForPrediction, AutoformerModel, AutoformerPreTrainedModel, ) else: import sys lowerCAmelCase : Any =_LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)
693
0
'''simple docstring''' import torch def _UpperCAmelCase ( ): '''simple docstring''' if torch.cuda.is_available(): lowerCAmelCase : int = torch.cuda.device_count() else: lowerCAmelCase : int = 0 print(F"""Successfully ran on {num_gpus} GPUs""" ) if __name__ == "__main__": main()
709
import copy
from collections import OrderedDict
from typing import Dict, Mapping

from packaging import version

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ..auto import CONFIG_MAPPING


logger = logging.get_logger(__name__)

DETR_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "facebook/detr-resnet-50": "https://huggingface.co/facebook/detr-resnet-50/resolve/main/config.json",
    # See all DETR models at https://huggingface.co/models?filter=detr
}


class DetrConfig(PretrainedConfig):
    """Configuration for a DETR model.

    Holds the architecture hyper-parameters (backbone choice, encoder/decoder
    sizes, dropout rates) as well as the Hungarian-matcher costs and loss
    coefficients used during training.
    """

    model_type = "detr"
    keys_to_ignore_at_inference = ["past_key_values"]
    attribute_map = {
        "hidden_size": "d_model",
        "num_attention_heads": "encoder_attention_heads",
    }

    def __init__(
        self,
        use_timm_backbone=True,
        backbone_config=None,
        num_channels=3,
        num_queries=100,
        encoder_layers=6,
        encoder_ffn_dim=2048,
        encoder_attention_heads=8,
        decoder_layers=6,
        decoder_ffn_dim=2048,
        decoder_attention_heads=8,
        encoder_layerdrop=0.0,
        decoder_layerdrop=0.0,
        is_encoder_decoder=True,
        activation_function="relu",
        d_model=256,
        dropout=0.1,
        attention_dropout=0.0,
        activation_dropout=0.0,
        init_std=0.02,
        init_xavier_std=1.0,
        auxiliary_loss=False,
        position_embedding_type="sine",
        backbone="resnet50",
        use_pretrained_backbone=True,
        dilation=False,
        class_cost=1,
        bbox_cost=5,
        giou_cost=2,
        mask_loss_coefficient=1,
        dice_loss_coefficient=1,
        bbox_loss_coefficient=5,
        giou_loss_coefficient=2,
        eos_coefficient=0.1,
        **kwargs,
    ):
        # `backbone_config` (HF backbone) and `use_timm_backbone` are mutually exclusive.
        if backbone_config is not None and use_timm_backbone:
            raise ValueError("You can't specify both `backbone_config` and `use_timm_backbone`.")

        if not use_timm_backbone:
            if backbone_config is None:
                logger.info("`backbone_config` is `None`. Initializing the config with the default `ResNet` backbone.")
                backbone_config = CONFIG_MAPPING["resnet"](out_features=["stage4"])
            elif isinstance(backbone_config, dict):
                backbone_model_type = backbone_config.get("model_type")
                config_class = CONFIG_MAPPING[backbone_model_type]
                backbone_config = config_class.from_dict(backbone_config)
            # set timm attributes to None
            dilation, backbone, use_pretrained_backbone = None, None, None

        self.use_timm_backbone = use_timm_backbone
        self.backbone_config = backbone_config
        self.num_channels = num_channels
        self.num_queries = num_queries
        self.d_model = d_model
        self.encoder_ffn_dim = encoder_ffn_dim
        self.encoder_layers = encoder_layers
        self.encoder_attention_heads = encoder_attention_heads
        self.decoder_ffn_dim = decoder_ffn_dim
        self.decoder_layers = decoder_layers
        self.decoder_attention_heads = decoder_attention_heads
        self.dropout = dropout
        self.attention_dropout = attention_dropout
        self.activation_dropout = activation_dropout
        self.activation_function = activation_function
        self.init_std = init_std
        self.init_xavier_std = init_xavier_std
        self.encoder_layerdrop = encoder_layerdrop
        self.decoder_layerdrop = decoder_layerdrop
        self.num_hidden_layers = encoder_layers
        self.auxiliary_loss = auxiliary_loss
        self.position_embedding_type = position_embedding_type
        self.backbone = backbone
        self.use_pretrained_backbone = use_pretrained_backbone
        self.dilation = dilation
        # Hungarian matcher
        self.class_cost = class_cost
        self.bbox_cost = bbox_cost
        self.giou_cost = giou_cost
        # Loss coefficients
        self.mask_loss_coefficient = mask_loss_coefficient
        self.dice_loss_coefficient = dice_loss_coefficient
        self.bbox_loss_coefficient = bbox_loss_coefficient
        self.giou_loss_coefficient = giou_loss_coefficient
        self.eos_coefficient = eos_coefficient
        super().__init__(is_encoder_decoder=is_encoder_decoder, **kwargs)

    @property
    def num_attention_heads(self) -> int:
        return self.encoder_attention_heads

    @property
    def hidden_size(self) -> int:
        return self.d_model

    @classmethod
    def from_backbone_config(cls, backbone_config, **kwargs):
        """Build a config from a backbone config plus any DETR overrides."""
        return cls(backbone_config=backbone_config, **kwargs)

    def to_dict(self) -> Dict[str, any]:
        """Serialize to a plain dict, expanding the nested backbone config."""
        output = copy.deepcopy(self.__dict__)
        if output["backbone_config"] is not None:
            output["backbone_config"] = self.backbone_config.to_dict()
        output["model_type"] = self.__class__.model_type
        return output


class DetrOnnxConfig(OnnxConfig):
    """ONNX export settings for DETR."""

    torch_onnx_minimum_version = version.parse("1.11")

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        # Dynamic axes for the two model inputs.
        return OrderedDict(
            [
                ("pixel_values", {0: "batch", 1: "num_channels", 2: "height", 3: "width"}),
                ("pixel_mask", {0: "batch"}),
            ]
        )

    @property
    def atol_for_validation(self) -> float:
        return 1e-5

    @property
    def default_onnx_opset(self) -> int:
        return 12
693
0
from __future__ import annotations def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' if len(snake_case_ ) == 0: return array lowerCAmelCase : Tuple = min(snake_case_ ), max(snake_case_ ) # Compute the variables lowerCAmelCase : Tuple = _max - _min + 1 lowerCAmelCase : str = [0] * holes_range, [0] * holes_range # Make the sorting. for i in array: lowerCAmelCase : Union[str, Any] = i - _min lowerCAmelCase : List[str] = i holes_repeat[index] += 1 # Makes the array back by replacing the numbers. lowerCAmelCase : Tuple = 0 for i in range(snake_case_ ): while holes_repeat[i] > 0: lowerCAmelCase : Union[str, Any] = holes[i] index += 1 holes_repeat[i] -= 1 # Returns the sorted array. return array if __name__ == "__main__": import doctest doctest.testmod() lowerCAmelCase : int =input('Enter numbers separated by comma:\n') lowerCAmelCase : Optional[int] =[int(x) for x in user_input.split(',')] print(pigeon_sort(unsorted))
710
import json
import logging
import os
import sys
from pathlib import Path

import finetune_rag

from transformers.file_utils import is_apex_available
from transformers.testing_utils import (
    TestCasePlus,
    execute_subprocess_async,
    require_ray,
    require_torch_gpu,
    require_torch_multi_gpu,
)


logging.basicConfig(level=logging.DEBUG)

logger = logging.getLogger()
stream_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stream_handler)


class RagFinetuneExampleTests(TestCasePlus):
    """End-to-end smoke tests for the RAG fine-tuning example script."""

    def _create_dummy_data(self, data_dir):
        """Write tiny train/val/test source+target files under ``data_dir``."""
        os.makedirs(data_dir, exist_ok=True)
        contents = {"source": "What is love ?", "target": "life"}
        n_lines = {"train": 12, "val": 2, "test": 2}

        # Each split is n_lines copies of the same example.
        for split in ["train", "test", "val"]:
            for field in ["source", "target"]:
                content = "\n".join([contents[field]] * n_lines[split])
                with open(os.path.join(data_dir, f"{split}.{field}"), "w") as f:
                    f.write(content)

    def _run_finetune(self, gpus, distributed_retriever="pytorch"):
        """Run finetune_rag.py as a subprocess on dummy data; return metrics.json."""
        tmp_dir = self.get_auto_remove_tmp_dir()
        output_dir = os.path.join(tmp_dir, "output")
        data_dir = os.path.join(tmp_dir, "data")
        self._create_dummy_data(data_dir=data_dir)

        testargs = f"""
                --data_dir {data_dir} \
                --output_dir {output_dir} \
                --model_name_or_path facebook/rag-sequence-base \
                --model_type rag_sequence \
                --do_train \
                --do_predict \
                --n_val -1 \
                --val_check_interval 1.0 \
                --train_batch_size 2 \
                --eval_batch_size 1 \
                --max_source_length 25 \
                --max_target_length 25 \
                --val_max_target_length 25 \
                --test_max_target_length 25 \
                --label_smoothing 0.1 \
                --dropout 0.1 \
                --attention_dropout 0.1 \
                --weight_decay 0.001 \
                --adam_epsilon 1e-08 \
                --max_grad_norm 0.1 \
                --lr_scheduler polynomial \
                --learning_rate 3e-04 \
                --num_train_epochs 1 \
                --warmup_steps 4 \
                --gradient_accumulation_steps 1 \
                --distributed-port 8787 \
                --use_dummy_dataset 1 \
                --distributed_retriever {distributed_retriever} \
            """.split()

        if gpus > 0:
            testargs.append(f"--gpus={gpus}")
            if is_apex_available():
                testargs.append("--fp16")
        else:
            testargs.append("--gpus=0")
            testargs.append("--distributed_backend=ddp_cpu")
            testargs.append("--num_processes=2")

        cmd = [sys.executable, str(Path(finetune_rag.__file__).resolve())] + testargs
        execute_subprocess_async(cmd, env=self.get_env())

        metrics_save_path = os.path.join(output_dir, "metrics.json")
        with open(metrics_save_path) as f:
            result = json.load(f)
        return result

    @require_torch_gpu
    def test_finetune_gpu(self):
        result = self._run_finetune(gpus=1)
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_multi_gpu
    def test_finetune_multigpu(self):
        result = self._run_finetune(gpus=2)
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_gpu
    @require_ray
    def test_finetune_gpu_ray_retrieval(self):
        result = self._run_finetune(gpus=1, distributed_retriever="ray")
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)

    @require_torch_multi_gpu
    @require_ray
    def test_finetune_multigpu_ray_retrieval(self):
        result = self._run_finetune(gpus=1, distributed_retriever="ray")
        self.assertGreaterEqual(result["test"][0]["test_avg_em"], 0.2)
693
0
import json import os import tempfile from transformers.testing_utils import check_json_file_has_correct_format class _a : _UpperCamelCase: Dict = None def _snake_case ( self ) -> List[Any]: lowerCAmelCase : Dict = self.feature_extraction_class(**self.feat_extract_dict ) lowerCAmelCase : Tuple = json.loads(feat_extract.to_json_string() ) for key, value in self.feat_extract_dict.items(): self.assertEqual(obj[key] , UpperCamelCase__ ) def _snake_case ( self ) -> str: lowerCAmelCase : Any = self.feature_extraction_class(**self.feat_extract_dict ) with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : Any = os.path.join(UpperCamelCase__ , """feat_extract.json""" ) feat_extract_first.to_json_file(UpperCamelCase__ ) lowerCAmelCase : Dict = self.feature_extraction_class.from_json_file(UpperCamelCase__ ) self.assertEqual(feat_extract_second.to_dict() , feat_extract_first.to_dict() ) def _snake_case ( self ) -> List[str]: lowerCAmelCase : Any = self.feature_extraction_class(**self.feat_extract_dict ) with tempfile.TemporaryDirectory() as tmpdirname: lowerCAmelCase : Any = feat_extract_first.save_pretrained(UpperCamelCase__ )[0] check_json_file_has_correct_format(UpperCamelCase__ ) lowerCAmelCase : Optional[int] = self.feature_extraction_class.from_pretrained(UpperCamelCase__ ) self.assertEqual(feat_extract_second.to_dict() , feat_extract_first.to_dict() ) def _snake_case ( self ) -> Tuple: lowerCAmelCase : str = self.feature_extraction_class() self.assertIsNotNone(UpperCamelCase__ )
711
from ...configuration_utils import PretrainedConfig
from ...utils import logging


logger = logging.get_logger(__name__)

TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "transfo-xl-wt103": "https://huggingface.co/transfo-xl-wt103/resolve/main/config.json",
}


class TransfoXLConfig(PretrainedConfig):
    """Configuration for the Transformer-XL model.

    Fix: every ``__init__`` parameter had been renamed to ``lowercase_``
    while the body still read the real names (``vocab_size``, ``d_model``,
    ...); parameter names restored in the original positional order so the
    defaults line up with the mangled signature (267735, cutoffs, 1024, ...).
    """

    model_type = "transfo-xl"
    keys_to_ignore_at_inference = ["mems"]
    attribute_map = {
        "n_token": "vocab_size",
        "hidden_size": "d_model",
        "num_attention_heads": "n_head",
        "num_hidden_layers": "n_layer",
    }

    def __init__(
        self,
        vocab_size=267735,
        cutoffs=[20000, 40000, 200000],
        d_model=1024,
        d_embed=1024,
        n_head=16,
        d_head=64,
        d_inner=4096,
        div_val=4,
        pre_lnorm=False,
        n_layer=18,
        mem_len=1600,
        clamp_len=1000,
        same_length=True,
        proj_share_all_but_first=True,
        attn_type=0,
        sample_softmax=-1,
        adaptive=True,
        dropout=0.1,
        dropatt=0.0,
        untie_r=True,
        init="normal",
        init_range=0.01,
        proj_init_std=0.01,
        init_std=0.02,
        layer_norm_epsilon=1e-5,
        eos_token_id=0,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.cutoffs = []
        self.cutoffs.extend(cutoffs)
        # The first (head) cluster projection is never tied.
        if proj_share_all_but_first:
            self.tie_projs = [False] + [True] * len(self.cutoffs)
        else:
            self.tie_projs = [False] + [False] * len(self.cutoffs)
        self.d_model = d_model
        self.d_embed = d_embed
        self.d_head = d_head
        self.d_inner = d_inner
        self.div_val = div_val
        self.pre_lnorm = pre_lnorm
        self.n_layer = n_layer
        self.n_head = n_head
        self.mem_len = mem_len
        self.same_length = same_length
        self.attn_type = attn_type
        self.clamp_len = clamp_len
        self.sample_softmax = sample_softmax
        self.adaptive = adaptive
        self.dropout = dropout
        self.dropatt = dropatt
        self.untie_r = untie_r
        self.init = init
        self.init_range = init_range
        self.proj_init_std = proj_init_std
        self.init_std = init_std
        self.layer_norm_epsilon = layer_norm_epsilon
        super().__init__(eos_token_id=eos_token_id, **kwargs)

    @property
    def max_position_embeddings(self):
        # Message copied from Transformer-XL documentation
        logger.info(f"The model {self.model_type} is one of the few models that has no sequence length limit.")
        return -1

    @max_position_embeddings.setter
    def max_position_embeddings(self, value):
        # Message copied from Transformer-XL documentation
        raise NotImplementedError(
            f"The model {self.model_type} is one of the few models that has no sequence length limit."
        )
693
0
import json
from typing import Iterator, List, Union

from tokenizers import AddedToken, Regex, Tokenizer, decoders, normalizers, pre_tokenizers, trainers
from tokenizers.implementations.base_tokenizer import BaseTokenizer
from tokenizers.models import Unigram
from tokenizers.processors import TemplateProcessing


class SentencePieceUnigramTokenizer(BaseTokenizer):
    """SentencePiece-style Unigram tokenizer built on the `tokenizers` library.

    Normalization: NMT + NFKC, whitespace collapsing, lowercasing.
    Pre-tokenization: Metaspace + individual digits + punctuation.
    Fix: obfuscation dropped every attribute/local binding (``tokenizer``,
    ``parameters``, ``special_tokens_list`` ...) and replaced parameter
    references with undefined ``_lowerCamelCase``; bindings restored.
    """

    def __init__(
        self,
        replacement: str = "▁",
        add_prefix_space: bool = True,
        unk_token: Union[str, AddedToken] = "<unk>",
        eos_token: Union[str, AddedToken] = "</s>",
        pad_token: Union[str, AddedToken] = "<pad>",
    ):
        self.special_tokens = {
            "pad": {"id": 0, "token": pad_token},
            "eos": {"id": 1, "token": eos_token},
            "unk": {"id": 2, "token": unk_token},
        }

        # Token list indexed by id, so special tokens keep stable ids 0..2.
        self.special_tokens_list = [None] * len(self.special_tokens)
        for token_dict in self.special_tokens.values():
            self.special_tokens_list[token_dict["id"]] = token_dict["token"]

        tokenizer = Tokenizer(Unigram())

        tokenizer.normalizer = normalizers.Sequence(
            [
                normalizers.Nmt(),
                normalizers.NFKC(),
                normalizers.Replace(Regex(" {2,}"), " "),
                normalizers.Lowercase(),
            ]
        )
        tokenizer.pre_tokenizer = pre_tokenizers.Sequence(
            [
                pre_tokenizers.Metaspace(replacement=replacement, add_prefix_space=add_prefix_space),
                pre_tokenizers.Digits(individual_digits=True),
                pre_tokenizers.Punctuation(),
            ]
        )
        tokenizer.decoder = decoders.Metaspace(replacement=replacement, add_prefix_space=add_prefix_space)

        # Always append EOS to a single sequence.
        tokenizer.post_processor = TemplateProcessing(
            single=f"$A {self.special_tokens['eos']['token']}",
            special_tokens=[(self.special_tokens["eos"]["token"], self.special_tokens["eos"]["id"])],
        )

        parameters = {
            "model": "SentencePieceUnigram",
            "replacement": replacement,
            "add_prefix_space": add_prefix_space,
        }

        super().__init__(tokenizer, parameters)

    def train(
        self,
        files: Union[str, List[str]],
        vocab_size: int = 8000,
        show_progress: bool = True,
    ):
        """Train the model on the given file(s)."""
        trainer = trainers.UnigramTrainer(
            vocab_size=vocab_size,
            special_tokens=self.special_tokens_list,
            show_progress=show_progress,
        )

        if isinstance(files, str):
            files = [files]
        self._tokenizer.train(files, trainer=trainer)

        self.add_unk_id()

    def train_from_iterator(
        self,
        iterator: Union[Iterator[str], Iterator[Iterator[str]]],
        vocab_size: int = 8000,
        show_progress: bool = True,
    ):
        """Train the model on an iterator of text samples."""
        trainer = trainers.UnigramTrainer(
            vocab_size=vocab_size,
            special_tokens=self.special_tokens_list,
            show_progress=show_progress,
        )

        self._tokenizer.train_from_iterator(iterator, trainer=trainer)

        self.add_unk_id()

    def add_unk_id(self):
        # The Unigram model's unk_id can only be set by round-tripping the
        # serialized tokenizer JSON.
        tokenizer_json = json.loads(self._tokenizer.to_str())
        tokenizer_json["model"]["unk_id"] = self.special_tokens["unk"]["id"]
        self._tokenizer = Tokenizer.from_str(json.dumps(tokenizer_json))
712
import torch from diffusers import DiffusionPipeline class _a ( snake_case_ ): def __init__( self , lowercase_ , lowercase_ ) -> int: super().__init__() self.register_modules(unet=lowercase_ , scheduler=lowercase_ ) def __call__( self ) -> List[Any]: lowerCAmelCase : Union[str, Any] = torch.randn( (1, self.unet.config.in_channels, self.unet.config.sample_size, self.unet.config.sample_size) , ) lowerCAmelCase : Union[str, Any] = 1 lowerCAmelCase : Dict = self.unet(lowercase_ , lowercase_ ).sample lowerCAmelCase : str = self.scheduler.step(lowercase_ , lowercase_ , lowercase_ ).prev_sample lowerCAmelCase : Dict = scheduler_output - scheduler_output + torch.ones_like(lowercase_ ) return result
693
0
def _UpperCAmelCase ( ): '''simple docstring''' lowerCAmelCase : Optional[int] = [3_1, 2_8, 3_1, 3_0, 3_1, 3_0, 3_1, 3_1, 3_0, 3_1, 3_0, 3_1] lowerCAmelCase : Dict = 6 lowerCAmelCase : Tuple = 1 lowerCAmelCase : Optional[int] = 1_9_0_1 lowerCAmelCase : List[str] = 0 while year < 2_0_0_1: day += 7 if (year % 4 == 0 and year % 1_0_0 != 0) or (year % 4_0_0 == 0): if day > days_per_month[month - 1] and month != 2: month += 1 lowerCAmelCase : Tuple = day - days_per_month[month - 2] elif day > 2_9 and month == 2: month += 1 lowerCAmelCase : List[str] = day - 2_9 else: if day > days_per_month[month - 1]: month += 1 lowerCAmelCase : List[Any] = day - days_per_month[month - 2] if month > 1_2: year += 1 lowerCAmelCase : str = 1 if year < 2_0_0_1 and day == 1: sundays += 1 return sundays if __name__ == "__main__": print(solution())
713
import pytest import requests from datasets.utils.file_utils import http_head from .utils import OfflineSimulationMode, RequestWouldHangIndefinitelyError, offline @pytest.mark.integration def _UpperCAmelCase ( ): '''simple docstring''' with offline(OfflineSimulationMode.CONNECTION_TIMES_OUT ): with pytest.raises(SCREAMING_SNAKE_CASE__ ): requests.request("""GET""" ,"""https://huggingface.co""" ) with pytest.raises(requests.exceptions.ConnectTimeout ): requests.request("""GET""" ,"""https://huggingface.co""" ,timeout=1.0 ) @pytest.mark.integration def _UpperCAmelCase ( ): '''simple docstring''' with offline(OfflineSimulationMode.CONNECTION_FAILS ): with pytest.raises(requests.exceptions.ConnectionError ): requests.request("""GET""" ,"""https://huggingface.co""" ) def _UpperCAmelCase ( ): '''simple docstring''' with offline(OfflineSimulationMode.HF_DATASETS_OFFLINE_SET_TO_1 ): with pytest.raises(SCREAMING_SNAKE_CASE__ ): http_head("""https://huggingface.co""" )
693
0
import math import sys def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' if number != int(a__ ): raise ValueError("""the value of input must be a natural number""" ) if number < 0: raise ValueError("""the value of input must not be a negative number""" ) if number == 0: return 1 lowerCAmelCase : int = [-1] * (number + 1) lowerCAmelCase : int = 0 for i in range(1 ,number + 1 ): lowerCAmelCase : int = sys.maxsize lowerCAmelCase : Union[str, Any] = int(math.sqrt(a__ ) ) for j in range(1 ,root + 1 ): lowerCAmelCase : List[Any] = 1 + answers[i - (j**2)] lowerCAmelCase : Dict = min(a__ ,a__ ) lowerCAmelCase : List[str] = answer return answers[number] if __name__ == "__main__": import doctest doctest.testmod()
714
import json
import pathlib
import unittest

import numpy as np

from transformers.testing_utils import require_torch, require_vision, slow
from transformers.utils import is_torch_available, is_vision_available

from ...test_image_processing_common import ImageProcessingSavingTestMixin, prepare_image_inputs


if is_torch_available():
    import torch

if is_vision_available():
    from PIL import Image

    from transformers import DetrImageProcessor


class DetrImageProcessingTester(unittest.TestCase):
    """Builds processor kwargs and expected output sizes for the DETR tests.

    Fix: obfuscation broke both class names (``_a``), the undefined base
    ``snake_case_``, duplicated ``_snake_case`` method names, and unbound
    locals (``size``, ``image``, ``expected_height`` ...); restored.
    """

    def __init__(
        self,
        parent,
        batch_size=7,
        num_channels=3,
        min_resolution=30,
        max_resolution=400,
        do_resize=True,
        size=None,
        do_rescale=True,
        rescale_factor=1 / 255,
        do_normalize=True,
        image_mean=[0.5, 0.5, 0.5],
        image_std=[0.5, 0.5, 0.5],
        do_pad=True,
    ):
        # by setting size["longest_edge"] > max_resolution we're effectively not testing this :p
        size = size if size is not None else {"shortest_edge": 18, "longest_edge": 1333}
        self.parent = parent
        self.batch_size = batch_size
        self.num_channels = num_channels
        self.min_resolution = min_resolution
        self.max_resolution = max_resolution
        self.do_resize = do_resize
        self.size = size
        self.do_rescale = do_rescale
        self.rescale_factor = rescale_factor
        self.do_normalize = do_normalize
        self.image_mean = image_mean
        self.image_std = image_std
        self.do_pad = do_pad

    def prepare_image_processor_dict(self):
        return {
            "do_resize": self.do_resize,
            "size": self.size,
            "do_rescale": self.do_rescale,
            "rescale_factor": self.rescale_factor,
            "do_normalize": self.do_normalize,
            "image_mean": self.image_mean,
            "image_std": self.image_std,
            "do_pad": self.do_pad,
        }

    def get_expected_values(self, image_inputs, batched=False):
        """Expected (height, width) after shortest-edge resizing; for a batch,
        the max over per-image expectations (padding target)."""
        if not batched:
            image = image_inputs[0]
            if isinstance(image, Image.Image):
                w, h = image.size
            else:
                h, w = image.shape[1], image.shape[2]
            if w < h:
                expected_height = int(self.size["shortest_edge"] * h / w)
                expected_width = self.size["shortest_edge"]
            elif w > h:
                expected_height = self.size["shortest_edge"]
                expected_width = int(self.size["shortest_edge"] * w / h)
            else:
                expected_height = self.size["shortest_edge"]
                expected_width = self.size["shortest_edge"]
        else:
            expected_values = []
            for image in image_inputs:
                expected_height, expected_width = self.get_expected_values([image])
                expected_values.append((expected_height, expected_width))
            expected_height = max(expected_values, key=lambda item: item[0])[0]
            expected_width = max(expected_values, key=lambda item: item[1])[1]

        return expected_height, expected_width


@require_torch
@require_vision
class DetrImageProcessingTest(ImageProcessingSavingTestMixin, unittest.TestCase):
    image_processing_class = DetrImageProcessor if is_vision_available() else None

    def setUp(self):
        self.image_processor_tester = DetrImageProcessingTester(self)

    @property
    def image_processor_dict(self):
        return self.image_processor_tester.prepare_image_processor_dict()

    def test_image_processor_properties(self):
        image_processing = self.image_processing_class(**self.image_processor_dict)
        self.assertTrue(hasattr(image_processing, "image_mean"))
        self.assertTrue(hasattr(image_processing, "image_std"))
        self.assertTrue(hasattr(image_processing, "do_normalize"))
        self.assertTrue(hasattr(image_processing, "do_rescale"))
        self.assertTrue(hasattr(image_processing, "rescale_factor"))
        self.assertTrue(hasattr(image_processing, "do_resize"))
        self.assertTrue(hasattr(image_processing, "size"))
        self.assertTrue(hasattr(image_processing, "do_pad"))

    def test_image_processor_from_dict_with_kwargs(self):
        image_processor = self.image_processing_class.from_dict(self.image_processor_dict)
        self.assertEqual(image_processor.size, {"shortest_edge": 18, "longest_edge": 1333})
        self.assertEqual(image_processor.do_pad, True)

        image_processor = self.image_processing_class.from_dict(
            self.image_processor_dict, size=42, max_size=84, pad_and_return_pixel_mask=False
        )
        self.assertEqual(image_processor.size, {"shortest_edge": 42, "longest_edge": 84})
        self.assertEqual(image_processor.do_pad, False)

    def test_batch_feature(self):
        pass

    def test_call_pil(self):
        # Initialize image_processing
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random PIL images
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False)
        for image in image_inputs:
            self.assertIsInstance(image, Image.Image)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values

        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs)

        self.assertEqual(
            encoded_images.shape,
            (1, self.image_processor_tester.num_channels, expected_height, expected_width),
        )

        # Test batched
        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs, batched=True)

        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values
        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                expected_height,
                expected_width,
            ),
        )

    def test_call_numpy(self):
        # Initialize image_processing
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random numpy tensors
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False, numpify=True)
        for image in image_inputs:
            self.assertIsInstance(image, np.ndarray)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values

        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs)

        self.assertEqual(
            encoded_images.shape,
            (1, self.image_processor_tester.num_channels, expected_height, expected_width),
        )

        # Test batched
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values

        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs, batched=True)

        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                expected_height,
                expected_width,
            ),
        )

    def test_call_pytorch(self):
        # Initialize image_processing
        image_processing = self.image_processing_class(**self.image_processor_dict)
        # create random PyTorch tensors
        image_inputs = prepare_image_inputs(self.image_processor_tester, equal_resolution=False, torchify=True)
        for image in image_inputs:
            self.assertIsInstance(image, torch.Tensor)

        # Test not batched input
        encoded_images = image_processing(image_inputs[0], return_tensors="pt").pixel_values

        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs)

        self.assertEqual(
            encoded_images.shape,
            (1, self.image_processor_tester.num_channels, expected_height, expected_width),
        )

        # Test batched
        encoded_images = image_processing(image_inputs, return_tensors="pt").pixel_values

        expected_height, expected_width = self.image_processor_tester.get_expected_values(image_inputs, batched=True)

        self.assertEqual(
            encoded_images.shape,
            (
                self.image_processor_tester.batch_size,
                self.image_processor_tester.num_channels,
                expected_height,
                expected_width,
            ),
        )

    @slow
    def test_call_pytorch_with_coco_detection_annotations(self):
        # prepare image and target
        image = Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png")
        with open("./tests/fixtures/tests_samples/COCO/coco_annotations.txt", "r") as f:
            target = json.loads(f.read())

        target = {"image_id": 39769, "annotations": target}

        # encode them
        image_processing = DetrImageProcessor.from_pretrained("facebook/detr-resnet-50")
        encoding = image_processing(images=image, annotations=target, return_tensors="pt")

        # verify pixel values
        expected_shape = torch.Size([1, 3, 800, 1066])
        self.assertEqual(encoding["pixel_values"].shape, expected_shape)

        expected_slice = torch.tensor([0.2796, 0.3138, 0.3481])
        self.assertTrue(torch.allclose(encoding["pixel_values"][0, 0, 0, :3], expected_slice, atol=1e-4))

        # verify area
        expected_area = torch.tensor([5887.9600, 11250.2061, 489353.8438, 837122.7500, 147967.5156, 165732.3438])
        self.assertTrue(torch.allclose(encoding["labels"][0]["area"], expected_area))
        # verify boxes
        expected_boxes_shape = torch.Size([6, 4])
        self.assertEqual(encoding["labels"][0]["boxes"].shape, expected_boxes_shape)
        expected_boxes_slice = torch.tensor([0.5503, 0.2765, 0.0604, 0.2215])
        self.assertTrue(torch.allclose(encoding["labels"][0]["boxes"][0], expected_boxes_slice, atol=1e-3))
        # verify image_id
        expected_image_id = torch.tensor([39769])
        self.assertTrue(torch.allclose(encoding["labels"][0]["image_id"], expected_image_id))
        # verify is_crowd
        expected_is_crowd = torch.tensor([0, 0, 0, 0, 0, 0])
        self.assertTrue(torch.allclose(encoding["labels"][0]["iscrowd"], expected_is_crowd))
        # verify class_labels
        expected_class_labels = torch.tensor([75, 75, 63, 65, 17, 17])
        self.assertTrue(torch.allclose(encoding["labels"][0]["class_labels"], expected_class_labels))
        # verify orig_size
        expected_orig_size = torch.tensor([480, 640])
        self.assertTrue(torch.allclose(encoding["labels"][0]["orig_size"], expected_orig_size))
        # verify size
        expected_size = torch.tensor([800, 1066])
        self.assertTrue(torch.allclose(encoding["labels"][0]["size"], expected_size))

    @slow
    def test_call_pytorch_with_coco_panoptic_annotations(self):
        # prepare image, target and masks_path
        image = Image.open("./tests/fixtures/tests_samples/COCO/000000039769.png")
        with open("./tests/fixtures/tests_samples/COCO/coco_panoptic_annotations.txt", "r") as f:
            target = json.loads(f.read())

        target = {"file_name": "000000039769.png", "image_id": 39769, "segments_info": target}

        masks_path = pathlib.Path("./tests/fixtures/tests_samples/COCO/coco_panoptic")

        # encode them
        image_processing = DetrImageProcessor.from_pretrained("facebook/detr-resnet-50-panoptic")
        encoding = image_processing(images=image, annotations=target, masks_path=masks_path, return_tensors="pt")

        # verify pixel values
        expected_shape = torch.Size([1, 3, 800, 1066])
        self.assertEqual(encoding["pixel_values"].shape, expected_shape)

        expected_slice = torch.tensor([0.2796, 0.3138, 0.3481])
        self.assertTrue(torch.allclose(encoding["pixel_values"][0, 0, 0, :3], expected_slice, atol=1e-4))

        # verify area
        expected_area = torch.tensor([147979.6875, 165527.0469, 484638.5938, 11292.9375, 5879.6562, 7634.1147])
        self.assertTrue(torch.allclose(encoding["labels"][0]["area"], expected_area))
        # verify boxes
        expected_boxes_shape = torch.Size([6, 4])
        self.assertEqual(encoding["labels"][0]["boxes"].shape, expected_boxes_shape)
        expected_boxes_slice = torch.tensor([0.2625, 0.5437, 0.4688, 0.8625])
        self.assertTrue(torch.allclose(encoding["labels"][0]["boxes"][0], expected_boxes_slice, atol=1e-3))
        # verify image_id
        expected_image_id = torch.tensor([39769])
        self.assertTrue(torch.allclose(encoding["labels"][0]["image_id"], expected_image_id))
        # verify is_crowd
        expected_is_crowd = torch.tensor([0, 0, 0, 0, 0, 0])
        self.assertTrue(torch.allclose(encoding["labels"][0]["iscrowd"], expected_is_crowd))
        # verify class_labels
        expected_class_labels = torch.tensor([17, 17, 63, 75, 75, 93])
        self.assertTrue(torch.allclose(encoding["labels"][0]["class_labels"], expected_class_labels))
        # verify masks
        expected_masks_sum = 822873
        self.assertEqual(encoding["labels"][0]["masks"].sum().item(), expected_masks_sum)
        # verify orig_size
        expected_orig_size = torch.tensor([480, 640])
        self.assertTrue(torch.allclose(encoding["labels"][0]["orig_size"], expected_orig_size))
        # verify size
        expected_size = torch.tensor([800, 1066])
        self.assertTrue(torch.allclose(encoding["labels"][0]["size"], expected_size))
693
0
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase , lowerCAmelCase : Dict = len(UpperCamelCase__ ), len(grid[0] ) if ( min(UpperCamelCase__ ,UpperCamelCase__ ) < 0 or row == row_length or col == col_length or (row, col) in visit or grid[row][col] == 1 ): return 0 if row == row_length - 1 and col == col_length - 1: return 1 visit.add((row, col) ) lowerCAmelCase : Optional[int] = 0 count += depth_first_search(UpperCamelCase__ ,row + 1 ,UpperCamelCase__ ,UpperCamelCase__ ) count += depth_first_search(UpperCamelCase__ ,row - 1 ,UpperCamelCase__ ,UpperCamelCase__ ) count += depth_first_search(UpperCamelCase__ ,UpperCamelCase__ ,col + 1 ,UpperCamelCase__ ) count += depth_first_search(UpperCamelCase__ ,UpperCamelCase__ ,col - 1 ,UpperCamelCase__ ) visit.remove((row, col) ) return count if __name__ == "__main__": import doctest doctest.testmod()
715
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Tuple = 0 while b > 0: if b & 1: res += a a += a b >>= 1 return res def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Dict = 0 while b > 0: if b & 1: lowerCAmelCase : Optional[int] = ((res % c) + (a % c)) % c a += a b >>= 1 return res
693
0
from typing import List, Optional, Union

import torch

from ...models import UNet2DConditionModel, VQModel
from ...pipelines import DiffusionPipeline
from ...pipelines.pipeline_utils import ImagePipelineOutput
from ...schedulers import DDPMScheduler
from ...utils import (
    is_accelerate_available,
    is_accelerate_version,
    logging,
    randn_tensor,
    replace_example_docstring,
)


logger = logging.get_logger(__name__)  # pylint: disable=invalid-name

EXAMPLE_DOC_STRING = """
    Examples:
        ```py
        >>> from diffusers import KandinskyV22Pipeline, KandinskyV22PriorPipeline
        >>> import torch

        >>> pipe_prior = KandinskyV22PriorPipeline.from_pretrained("kandinsky-community/kandinsky-2-2-prior")
        >>> pipe_prior.to("cuda")

        >>> prompt = "red cat, 4k photo"
        >>> out = pipe_prior(prompt)
        >>> image_emb = out.image_embeds
        >>> zero_image_emb = out.negative_image_embeds

        >>> pipe = KandinskyV22Pipeline.from_pretrained("kandinsky-community/kandinsky-2-2-decoder")
        >>> pipe.to("cuda")

        >>> image = pipe(
        ...     image_embeds=image_emb,
        ...     negative_image_embeds=zero_image_emb,
        ...     height=768,
        ...     width=768,
        ...     num_inference_steps=50,
        ... ).images

        >>> image[0].save("cat.png")
        ```
"""


def downscale_height_and_width(height, width, scale_factor=8):
    """Round (height, width) up to the nearest multiple of scale_factor**2,
    then divide by scale_factor to get latent dimensions."""
    new_height = height // scale_factor**2
    if height % scale_factor**2 != 0:
        new_height += 1
    new_width = width // scale_factor**2
    if width % scale_factor**2 != 0:
        new_width += 1
    return new_height * scale_factor, new_width * scale_factor


class KandinskyV22Pipeline(DiffusionPipeline):
    """Decoder pipeline for Kandinsky 2.2: UNet + DDPM scheduler + MoVQ decoder.

    Fix: obfuscation damage repaired throughout — typo'd import
    ``UNetaDConditionModel`` -> ``UNet2DConditionModel``, placeholder
    argument names, and unbound locals (``latents``, ``image_embeds``,
    ``noise_pred`` ...).
    """

    def __init__(
        self,
        unet: UNet2DConditionModel,
        scheduler: DDPMScheduler,
        movq: VQModel,
    ):
        super().__init__()

        self.register_modules(
            unet=unet,
            scheduler=scheduler,
            movq=movq,
        )
        self.movq_scale_factor = 2 ** (len(self.movq.config.block_out_channels) - 1)

    def prepare_latents(self, shape, dtype, device, generator, latents, scheduler):
        """Sample (or validate user-supplied) latents and scale by the
        scheduler's initial noise sigma."""
        if latents is None:
            latents = randn_tensor(shape, generator=generator, device=device, dtype=dtype)
        else:
            if latents.shape != shape:
                raise ValueError(f"Unexpected latents shape, got {latents.shape}, expected {shape}")
            latents = latents.to(device)

        latents = latents * scheduler.init_noise_sigma
        return latents

    def enable_sequential_cpu_offload(self, gpu_id=0):
        """Offload each submodule to CPU, moving it to GPU only while used."""
        if is_accelerate_available():
            from accelerate import cpu_offload
        else:
            raise ImportError("Please install accelerate via `pip install accelerate`")

        device = torch.device(f"cuda:{gpu_id}")

        models = [
            self.unet,
            self.movq,
        ]
        for cpu_offloaded_model in models:
            if cpu_offloaded_model is not None:
                cpu_offload(cpu_offloaded_model, device)

    def enable_model_cpu_offload(self, gpu_id=0):
        """Hook-based offload: whole models move to GPU on forward, back after."""
        if is_accelerate_available() and is_accelerate_version(">=", "0.17.0.dev0"):
            from accelerate import cpu_offload_with_hook
        else:
            raise ImportError("`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.")

        device = torch.device(f"cuda:{gpu_id}")

        if self.device.type != "cpu":
            self.to("cpu", silence_dtype_warnings=True)
            torch.cuda.empty_cache()  # otherwise we don't see the memory savings (but they probably exist)

        hook = None
        for cpu_offloaded_model in [self.unet, self.movq]:
            _, hook = cpu_offload_with_hook(cpu_offloaded_model, device, prev_module_hook=hook)

        # We'll offload the last model manually.
        self.final_offload_hook = hook

    @property
    # Copied from diffusers.pipelines.stable_diffusion.pipeline_stable_diffusion.StableDiffusionPipeline._execution_device
    def _execution_device(self):
        if not hasattr(self.unet, "_hf_hook"):
            return self.device
        for module in self.unet.modules():
            if (
                hasattr(module, "_hf_hook")
                and hasattr(module._hf_hook, "execution_device")
                and module._hf_hook.execution_device is not None
            ):
                return torch.device(module._hf_hook.execution_device)
        return self.device

    @torch.no_grad()
    @replace_example_docstring(EXAMPLE_DOC_STRING)
    def __call__(
        self,
        image_embeds: Union[torch.FloatTensor, List[torch.FloatTensor]],
        negative_image_embeds: Union[torch.FloatTensor, List[torch.FloatTensor]],
        height: int = 512,
        width: int = 512,
        num_inference_steps: int = 100,
        guidance_scale: float = 4.0,
        num_images_per_prompt: int = 1,
        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
        latents: Optional[torch.FloatTensor] = None,
        output_type: Optional[str] = "pil",
        return_dict: bool = True,
    ):
        """Generate images from prior image embeddings.

        Returns an ImagePipelineOutput (or a 1-tuple when return_dict=False).
        """
        device = self._execution_device

        do_classifier_free_guidance = guidance_scale > 1.0

        if isinstance(image_embeds, list):
            image_embeds = torch.cat(image_embeds, dim=0)
        batch_size = image_embeds.shape[0] * num_images_per_prompt
        if isinstance(negative_image_embeds, list):
            negative_image_embeds = torch.cat(negative_image_embeds, dim=0)

        if do_classifier_free_guidance:
            image_embeds = image_embeds.repeat_interleave(num_images_per_prompt, dim=0)
            negative_image_embeds = negative_image_embeds.repeat_interleave(num_images_per_prompt, dim=0)

            # Unconditional embeddings first, conditional second (chunk(2) below).
            image_embeds = torch.cat([negative_image_embeds, image_embeds], dim=0).to(
                dtype=self.unet.dtype, device=device
            )

        self.scheduler.set_timesteps(num_inference_steps, device=device)
        timesteps_tensor = self.scheduler.timesteps

        num_channels_latents = self.unet.config.in_channels

        height, width = downscale_height_and_width(height, width, self.movq_scale_factor)

        # create initial latent
        latents = self.prepare_latents(
            (batch_size, num_channels_latents, height, width),
            image_embeds.dtype,
            device,
            generator,
            latents,
            self.scheduler,
        )

        for i, t in enumerate(self.progress_bar(timesteps_tensor)):
            # expand the latents if we are doing classifier free guidance
            latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents

            added_cond_kwargs = {"image_embeds": image_embeds}
            noise_pred = self.unet(
                sample=latent_model_input,
                timestep=t,
                encoder_hidden_states=None,
                added_cond_kwargs=added_cond_kwargs,
                return_dict=False,
            )[0]

            if do_classifier_free_guidance:
                noise_pred, variance_pred = noise_pred.split(latents.shape[1], dim=1)
                noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
                _, variance_pred_text = variance_pred.chunk(2)
                noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)
                noise_pred = torch.cat([noise_pred, variance_pred_text], dim=1)

            if not (
                hasattr(self.scheduler.config, "variance_type")
                and self.scheduler.config.variance_type in ["learned", "learned_range"]
            ):
                noise_pred, _ = noise_pred.split(latents.shape[1], dim=1)

            # compute the previous noisy sample x_t -> x_t-1
            latents = self.scheduler.step(
                noise_pred,
                t,
                latents,
                generator=generator,
            )[0]
        # post-processing
        image = self.movq.decode(latents, force_not_quantize=True)["sample"]

        if output_type not in ["pt", "np", "pil"]:
            raise ValueError(f"Only the output types `pt`, `pil` and `np` are supported not output_type={output_type}")

        if output_type in ["np", "pil"]:
            image = image * 0.5 + 0.5
            image = image.clamp(0, 1)
            image = image.cpu().permute(0, 2, 3, 1).float().numpy()

        if output_type == "pil":
            image = self.numpy_to_pil(image)

        if not return_dict:
            return (image,)

        return ImagePipelineOutput(images=image)
716
from math import factorial


class Dual:
    """A dual number for forward-mode automatic differentiation.

    ``real`` is the value at the evaluation point; ``duals[k]`` holds the
    coefficient of the (k+1)-th infinitesimal power, i.e. the k+1-th
    derivative divided by (k+1)!.
    """

    def __init__(self, real, rank):
        self.real = real
        if isinstance(rank, int):
            # An int rank means "seed with `rank` infinitesimal coefficients of 1".
            self.duals = [1] * rank
        else:
            # Otherwise rank is already a list of dual coefficients.
            self.duals = rank

    def __repr__(self) -> str:
        # e.g. "9+1E1" for Dual(9, 1).
        return (
            f"{self.real}+"
            f"{'+'.join(str(dual) + 'E' + str(n + 1) for n, dual in enumerate(self.duals))}"
        )

    def reduce(self):
        """Return a copy with trailing zero coefficients stripped."""
        cur = self.duals.copy()
        # NOTE(review): raises IndexError if every coefficient is zero — confirm
        # callers never pass an all-zero dual part.
        while cur[-1] == 0:
            cur.pop(-1)
        return Dual(self.real, cur)

    def __add__(self, other):
        if not isinstance(other, Dual):
            # Scalar addition only shifts the real part.
            return Dual(self.real + other, self.duals)
        s_dual = self.duals.copy()
        o_dual = other.duals.copy()
        # NOTE(review): the shorter dual list is padded with 1s (not 0s); this
        # mirrors the upstream algorithm but looks mathematically suspect —
        # confirm before relying on mixed-rank addition.
        if len(s_dual) > len(o_dual):
            o_dual.extend([1] * (len(s_dual) - len(o_dual)))
        elif len(s_dual) < len(o_dual):
            s_dual.extend([1] * (len(o_dual) - len(s_dual)))
        new_duals = []
        for i in range(len(s_dual)):
            new_duals.append(s_dual[i] + o_dual[i])
        return Dual(self.real + other.real, new_duals)

    __radd__ = __add__

    def __sub__(self, other):
        return self + other * -1

    def __mul__(self, other):
        if not isinstance(other, Dual):
            # Scalar multiplication scales every coefficient.
            new_duals = []
            for i in self.duals:
                new_duals.append(i * other)
            return Dual(self.real * other, new_duals)
        # Full dual product: convolve the dual parts, then add the
        # cross terms with each real part.
        new_duals = [0] * (len(self.duals) + len(other.duals) + 1)
        for i, item in enumerate(self.duals):
            for j, jtem in enumerate(other.duals):
                new_duals[i + j + 1] += item * jtem
        for k in range(len(self.duals)):
            new_duals[k] += self.duals[k] * other.real
        for index in range(len(other.duals)):
            new_duals[index] += other.duals[index] * self.real
        return Dual(self.real * other.real, new_duals)

    __rmul__ = __mul__

    def __truediv__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i / other)
            return Dual(self.real / other, new_duals)
        # Division by another dual number is not supported.
        raise ValueError

    def __floordiv__(self, other):
        if not isinstance(other, Dual):
            new_duals = []
            for i in self.duals:
                new_duals.append(i // other)
            return Dual(self.real // other, new_duals)
        raise ValueError

    def __pow__(self, n):
        if n < 0 or isinstance(n, float):
            raise ValueError("power must be a positive integer")
        if n == 0:
            return 1
        if n == 1:
            return self
        # Repeated multiplication; rank grows with each product.
        x = self
        for _ in range(n - 1):
            x *= self
        return x


def differentiate(func, position, order):
    """Return the ``order``-th derivative of ``func`` at ``position``.

    :param func: a callable built from +, -, *, // and ** operations.
    :param position: the point (int or float) at which to differentiate.
    :param order: derivative order (0 returns the plain function value).
    :raises ValueError: on non-callable func, non-numeric position,
        or non-int order.
    """
    if not callable(func):
        raise ValueError("differentiate() requires a function as input for func")
    if not isinstance(position, (float, int)):
        raise ValueError("differentiate() requires a float as input for position")
    if not isinstance(order, int):
        raise ValueError("differentiate() requires an int as input for order")
    d = Dual(position, 1)
    result = func(d)
    if order == 0:
        return result.real
    # duals[order-1] is f^(order)(x) / order!, so scale back up.
    return result.duals[order - 1] * factorial(order)


if __name__ == "__main__":
    import doctest

    doctest.testmod()

    def f(y):
        return y**2 * y**4

    print(differentiate(f, 9, 2))
693
0
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Optional[Any] = 0 for ch in input_str: lowerCAmelCase : str = ord(SCREAMING_SNAKE_CASE__ ) lowerCAmelCase : List[str] = pow(2 ,SCREAMING_SNAKE_CASE__ ) # If we already turned on bit for current character's unicode if bitmap >> ch_unicode & 1 == 1: return False bitmap |= ch_bit_index_on return True if __name__ == "__main__": import doctest doctest.testmod()
717
from ..utils import DummyObject, requires_backends class _a ( metaclass=snake_case_ ): _UpperCamelCase: List[Any] = ["keras_nlp"] def __init__( self , *lowercase_ , **lowercase_ ) -> Tuple: requires_backends(self , ["""keras_nlp"""] )
693
0
import dataclasses
import json
import sys
import types
from argparse import ArgumentDefaultsHelpFormatter, ArgumentParser, ArgumentTypeError
from copy import copy
from enum import Enum
from inspect import isclass
from pathlib import Path
from typing import Any, Callable, Dict, Iterable, List, Literal, NewType, Optional, Tuple, Union, get_type_hints

DataClass = NewType("DataClass", Any)
DataClassType = NewType("DataClassType", Any)


def string_to_bool(v):
    """Parse a human-friendly boolean CLI value ('yes'/'no', 'true'/'false', …).

    :raises ArgumentTypeError: when the string is not a recognized truthy/falsy value.
    """
    if isinstance(v, bool):
        return v
    if v.lower() in ("yes", "true", "t", "y", "1"):
        return True
    elif v.lower() in ("no", "false", "f", "n", "0"):
        return False
    else:
        raise ArgumentTypeError(
            f"""Truthy value expected: got {v} but expected one of yes/no, true/false, t/f, y/n, 1/0 (case insensitive)."""
        )


def make_choice_type_function(choices: list) -> Callable[[str], Any]:
    """Return a parser that maps a CLI string back to the matching choice object.

    Unknown strings are returned unchanged so argparse's own `choices`
    validation can reject them with a proper error message.
    """
    str_to_choice = {str(choice): choice for choice in choices}
    return lambda arg: str_to_choice.get(arg, arg)


def HfArg(
    *,
    aliases=None,
    help=None,
    default=dataclasses.MISSING,
    default_factory=dataclasses.MISSING,
    metadata=None,
    **kwargs,
):
    """Convenience wrapper around ``dataclasses.field`` that stores CLI metadata.

    :param aliases: extra option strings (e.g. ``"-f"``) for this argument.
    :param help: help text shown by ``--help``.
    :param default: static default; mutually exclusive with ``default_factory``.
    :param default_factory: callable producing the default (for mutable values).
    :param metadata: extra metadata dict merged with aliases/help.
    """
    if metadata is None:
        # Important: don't use a mutable dict as the parameter default —
        # it would be shared across calls.
        metadata = {}
    if aliases is not None:
        metadata["aliases"] = aliases
    if help is not None:
        metadata["help"] = help
    return dataclasses.field(metadata=metadata, default=default, default_factory=default_factory, **kwargs)


class HfArgumentParser(ArgumentParser):
    """ArgumentParser that derives its arguments from dataclass type hints.

    Each dataclass field becomes a ``--field_name`` option whose type,
    default, and required-ness are inferred from the annotation.
    """

    dataclass_types: Iterable[DataClassType]

    def __init__(self, dataclass_types, **kwargs):
        # Make defaults appear in --help output unless the caller overrides.
        if "formatter_class" not in kwargs:
            kwargs["formatter_class"] = ArgumentDefaultsHelpFormatter
        super().__init__(**kwargs)
        if dataclasses.is_dataclass(dataclass_types):
            dataclass_types = [dataclass_types]
        self.dataclass_types = list(dataclass_types)
        for dtype in self.dataclass_types:
            self._add_dataclass_arguments(dtype)

    @staticmethod
    def _parse_dataclass_field(parser, field):
        """Register one dataclass field as an argparse argument on ``parser``."""
        field_name = f"--{field.name}"
        # field.metadata is not used at all by Data Classes;
        # it is provided as a third-party extension mechanism.
        kwargs = field.metadata.copy()
        if isinstance(field.type, str):
            raise RuntimeError(
                "Unresolved type detected, which should have been done with the help of "
                "`typing.get_type_hints` method by default"
            )

        aliases = kwargs.pop("aliases", [])
        if isinstance(aliases, str):
            aliases = [aliases]

        origin_type = getattr(field.type, "__origin__", field.type)
        if origin_type is Union or (hasattr(types, "UnionType") and isinstance(origin_type, types.UnionType)):
            # Only Optional[X] (or str unions) are representable by argparse.
            if str not in field.type.__args__ and (
                len(field.type.__args__) != 2 or type(None) not in field.type.__args__
            ):
                raise ValueError(
                    "Only `Union[X, NoneType]` (i.e., `Optional[X]`) is allowed for `Union` because"
                    " the argument parser only supports one type per argument."
                    f" Problem encountered in field '{field.name}'."
                )
            if type(None) not in field.type.__args__:
                # filter `str` in Union
                field.type = field.type.__args__[0] if field.type.__args__[1] == str else field.type.__args__[1]
                origin_type = getattr(field.type, "__origin__", field.type)
            elif bool not in field.type.__args__:
                # filter `NoneType` in Union (except for `Union[bool, NoneType]`)
                field.type = (
                    field.type.__args__[0] if isinstance(None, field.type.__args__[1]) else field.type.__args__[1]
                )
                origin_type = getattr(field.type, "__origin__", field.type)

        # kwargs for a boolean field, kept so we can init a `no_*` complement
        # argument (see below).
        bool_kwargs = {}
        if origin_type is Literal or (isinstance(field.type, type) and issubclass(field.type, Enum)):
            if origin_type is Literal:
                kwargs["choices"] = field.type.__args__
            else:
                kwargs["choices"] = [x.value for x in field.type]
            kwargs["type"] = make_choice_type_function(kwargs["choices"])
            if field.default is not dataclasses.MISSING:
                kwargs["default"] = field.default
            else:
                kwargs["required"] = True
        elif field.type is bool or field.type == Optional[bool]:
            # Copy the current kwargs to instantiate a `no_*` complement argument
            # below; the complement must be created AFTER the real argument.
            bool_kwargs = copy(kwargs)
            # Hack because type=bool in argparse does not behave as we want.
            kwargs["type"] = string_to_bool
            if field.type is bool or (field.default is not None and field.default is not dataclasses.MISSING):
                # Default value is False if we have no default when of type bool.
                default = False if field.default is dataclasses.MISSING else field.default
                # This is the value picked when --field_name is absent.
                kwargs["default"] = default
                # Accept 0 or 1 values after --field_name.
                kwargs["nargs"] = "?"
                # Value picked for a bare --field_name (no value).
                kwargs["const"] = True
        elif isclass(origin_type) and issubclass(origin_type, list):
            kwargs["type"] = field.type.__args__[0]
            kwargs["nargs"] = "+"
            if field.default_factory is not dataclasses.MISSING:
                kwargs["default"] = field.default_factory()
            elif field.default is dataclasses.MISSING:
                kwargs["required"] = True
        else:
            kwargs["type"] = field.type
            if field.default is not dataclasses.MISSING:
                kwargs["default"] = field.default
            elif field.default_factory is not dataclasses.MISSING:
                kwargs["default"] = field.default_factory()
            else:
                kwargs["required"] = True
        parser.add_argument(field_name, *aliases, **kwargs)

        # Add a complement `no_*` argument for a default-True boolean field.
        # Order is important for arguments with the same destination.
        if field.default is True and (field.type is bool or field.type == Optional[bool]):
            bool_kwargs["default"] = False
            parser.add_argument(f"--no_{field.name}", action="store_false", dest=field.name, **bool_kwargs)

    def _add_dataclass_arguments(self, dtype):
        """Resolve ``dtype``'s type hints and register every init field."""
        if hasattr(dtype, "_argument_group_name"):
            parser = self.add_argument_group(dtype._argument_group_name)
        else:
            parser = self
        try:
            type_hints: Dict[str, type] = get_type_hints(dtype)
        except NameError:
            raise RuntimeError(
                f"Type resolution failed for {dtype}. Try declaring the class in global scope or "
                "removing line of `from __future__ import annotations` which opts in Postponed "
                "Evaluation of Annotations (PEP 563)"
            )
        except TypeError as ex:
            # Remove this block when we drop Python 3.9 support.
            if sys.version_info[:2] < (3, 10) and "unsupported operand type(s) for |" in str(ex):
                python_version = ".".join(map(str, sys.version_info[:3]))
                raise RuntimeError(
                    f"Type resolution failed for {dtype} on Python {python_version}. Try removing "
                    "line of `from __future__ import annotations` which opts in union types as "
                    "`X | Y` (PEP 604) via Postponed Evaluation of Annotations (PEP 563). To "
                    "support Python versions that lower than 3.10, you need to use "
                    "`typing.Union[X, Y]` instead of `X | Y` and `typing.Optional[X]` instead of "
                    "`X | None`."
                ) from ex
            raise

        for field in dataclasses.fields(dtype):
            if not field.init:
                continue
            field.type = type_hints[field.name]
            self._parse_dataclass_field(parser, field)

    def parse_args_into_dataclasses(
        self,
        args=None,
        return_remaining_strings=False,
        look_for_args_file=True,
        args_filename=None,
        args_file_flag=None,
    ) -> Tuple[DataClass, ...]:
        """Parse CLI args (and optional .args files) into dataclass instances.

        :param args: explicit argument list; defaults to ``sys.argv[1:]``.
        :param return_remaining_strings: also return unconsumed args instead of raising.
        :param look_for_args_file: read ``<script>.args`` next to the entry script.
        :param args_filename: explicit args file overriding the lookup.
        :param args_file_flag: CLI flag whose value names additional args files.
        :raises ValueError: on unknown arguments when not returning remainders.
        """
        if args_file_flag or args_filename or (look_for_args_file and len(sys.argv)):
            args_files = []
            if args_filename:
                args_files.append(Path(args_filename))
            elif look_for_args_file and len(sys.argv):
                args_files.append(Path(sys.argv[0]).with_suffix(".args"))

            # Args files given via the command-line flag should overwrite
            # default args files, so we add them last.
            if args_file_flag:
                # Special parser just to extract the args_file_flag values.
                args_file_parser = ArgumentParser()
                args_file_parser.add_argument(args_file_flag, type=str, action="append")
                # Use only the remaining args for further parsing.
                cfg, args = args_file_parser.parse_known_args(args=args)
                cmd_args_file_paths = vars(cfg).get(args_file_flag.lstrip("-"), None)
                if cmd_args_file_paths:
                    args_files.extend([Path(p) for p in cmd_args_file_paths])

            file_args = []
            for args_file in args_files:
                if args_file.exists():
                    file_args += args_file.read_text().split()

            # In case of duplicates the last argument wins: command-line args
            # overwrite file args, so they go last.
            args = file_args + args if args is not None else file_args + sys.argv[1:]

        namespace, remaining_args = self.parse_known_args(args=args)
        outputs = []
        for dtype in self.dataclass_types:
            keys = {f.name for f in dataclasses.fields(dtype) if f.init}
            inputs = {k: v for k, v in vars(namespace).items() if k in keys}
            for k in keys:
                delattr(namespace, k)
            obj = dtype(**inputs)
            outputs.append(obj)
        if len(namespace.__dict__) > 0:
            # Leftover attributes belong to no dataclass: return the namespace.
            outputs.append(namespace)
        if return_remaining_strings:
            return (*outputs, remaining_args)
        else:
            if remaining_args:
                raise ValueError(f"Some specified arguments are not used by the HfArgumentParser: {remaining_args}")
            return (*outputs,)

    def parse_dict(self, args, allow_extra_keys=False) -> Tuple[DataClass, ...]:
        """Instantiate the dataclasses from a plain dict of values.

        :raises ValueError: when ``allow_extra_keys`` is False and some keys
            match no dataclass field.
        """
        unused_keys = set(args.keys())
        outputs = []
        for dtype in self.dataclass_types:
            keys = {f.name for f in dataclasses.fields(dtype) if f.init}
            inputs = {k: v for k, v in args.items() if k in keys}
            unused_keys.difference_update(inputs.keys())
            obj = dtype(**inputs)
            outputs.append(obj)
        if not allow_extra_keys and unused_keys:
            raise ValueError(f"Some keys are not used by the HfArgumentParser: {sorted(unused_keys)}")
        return tuple(outputs)

    def parse_json_file(self, json_file, allow_extra_keys=False) -> Tuple[DataClass, ...]:
        """Load a JSON file and dispatch to :meth:`parse_dict`."""
        with open(Path(json_file), encoding="utf-8") as open_json_file:
            data = json.loads(open_json_file.read())
        outputs = self.parse_dict(data, allow_extra_keys=allow_extra_keys)
        return tuple(outputs)

    def parse_yaml_file(self, yaml_file, allow_extra_keys=False) -> Tuple[DataClass, ...]:
        """Load a YAML file and dispatch to :meth:`parse_dict`.

        PyYAML is imported lazily so the parser has no hard dependency on it
        unless YAML parsing is actually requested.
        """
        import yaml

        outputs = self.parse_dict(yaml.safe_load(Path(yaml_file).read_text()), allow_extra_keys=allow_extra_keys)
        return tuple(outputs)
718
from ...utils import ( OptionalDependencyNotAvailable, is_torch_available, is_transformers_available, is_transformers_version, ) try: if not (is_transformers_available() and is_torch_available() and is_transformers_version('>=', '4.25.0')): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: from ...utils.dummy_torch_and_transformers_objects import UnCLIPImageVariationPipeline, UnCLIPPipeline else: from .pipeline_unclip import UnCLIPPipeline from .pipeline_unclip_image_variation import UnCLIPImageVariationPipeline from .text_proj import UnCLIPTextProjModel
693
0
import json
import os
import unittest

from transformers import BatchEncoding, LEDTokenizer, LEDTokenizerFast
from transformers.models.led.tokenization_led import VOCAB_FILES_NAMES
from transformers.testing_utils import require_tokenizers, require_torch
from transformers.utils import cached_property

from ...test_tokenization_common import TokenizerTesterMixin


@require_tokenizers
class TestTokenizationLED(TokenizerTesterMixin, unittest.TestCase):
    """Tokenization tests for the LED (Longformer-Encoder-Decoder) tokenizers.

    Exercises both the slow (`LEDTokenizer`) and fast (`LEDTokenizerFast`)
    implementations against a tiny on-disk BPE vocabulary plus the published
    `allenai/led-base-16384` checkpoint.
    """

    tokenizer_class = LEDTokenizer
    rust_tokenizer_class = LEDTokenizerFast
    test_rust_tokenizer = True

    def setUp(self):
        super().setUp()
        # Minimal BPE vocabulary/merges written to a temp dir for from_pretrained().
        vocab = [
            "l",
            "o",
            "w",
            "e",
            "r",
            "s",
            "t",
            "i",
            "d",
            "n",
            "\u0120",
            "\u0120l",
            "\u0120n",
            "\u0120lo",
            "\u0120low",
            "er",
            "\u0120lowest",
            "\u0120newer",
            "\u0120wider",
            "<unk>",
        ]
        vocab_tokens = dict(zip(vocab, range(len(vocab))))
        merges = ["#version: 0.2", "\u0120 l", "\u0120l o", "\u0120lo w", "e r", ""]
        self.special_tokens_map = {"unk_token": "<unk>"}

        self.vocab_file = os.path.join(self.tmpdirname, VOCAB_FILES_NAMES["vocab_file"])
        self.merges_file = os.path.join(self.tmpdirname, VOCAB_FILES_NAMES["merges_file"])
        with open(self.vocab_file, "w", encoding="utf-8") as fp:
            fp.write(json.dumps(vocab_tokens) + "\n")
        with open(self.merges_file, "w", encoding="utf-8") as fp:
            fp.write("\n".join(merges))

    def get_tokenizer(self, **kwargs):
        kwargs.update(self.special_tokens_map)
        return self.tokenizer_class.from_pretrained(self.tmpdirname, **kwargs)

    def get_rust_tokenizer(self, **kwargs):
        kwargs.update(self.special_tokens_map)
        return self.rust_tokenizer_class.from_pretrained(self.tmpdirname, **kwargs)

    def get_input_output_texts(self, tokenizer):
        return "lower newer", "lower newer"

    @cached_property
    def default_tokenizer(self):
        return LEDTokenizer.from_pretrained("allenai/led-base-16384")

    @cached_property
    def default_tokenizer_fast(self):
        return LEDTokenizerFast.from_pretrained("allenai/led-base-16384")

    @require_torch
    def test_prepare_batch(self):
        # Batch encoding yields the expected ids and a (2, 9) padded shape.
        src_text = ["A long paragraph for summarization.", "Another paragraph for summarization."]
        expected_src_tokens = [0, 250, 251, 17818, 13, 39186, 1938, 4, 2]

        for tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:
            batch = tokenizer(src_text, max_length=len(expected_src_tokens), padding=True, return_tensors="pt")
            self.assertIsInstance(batch, BatchEncoding)

            self.assertEqual((2, 9), batch.input_ids.shape)
            self.assertEqual((2, 9), batch.attention_mask.shape)
            result = batch.input_ids.tolist()[0]
            self.assertListEqual(expected_src_tokens, result)

    @require_torch
    def test_prepare_batch_empty_target_text(self):
        # Without text_target, no decoder-side keys should be produced.
        src_text = ["A long paragraph for summarization.", "Another paragraph for summarization."]
        for tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:
            batch = tokenizer(src_text, padding=True, return_tensors="pt")
            self.assertIn("input_ids", batch)
            self.assertIn("attention_mask", batch)
            self.assertNotIn("labels", batch)
            self.assertNotIn("decoder_attention_mask", batch)

    @require_torch
    def test_tokenizer_as_target_length(self):
        # max_length padding on targets produces the requested length.
        tgt_text = [
            "Summary of the text.",
            "Another summary.",
        ]
        for tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:
            targets = tokenizer(text_target=tgt_text, max_length=32, padding="max_length", return_tensors="pt")
            self.assertEqual(32, targets["input_ids"].shape[1])

    @require_torch
    def test_prepare_batch_not_longer_than_maxlen(self):
        # Truncation caps a very long input at the model max length.
        for tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:
            batch = tokenizer(
                ["I am a small frog" * 1024, "I am a small frog"],
                padding=True,
                truncation=True,
                return_tensors="pt",
            )
            self.assertIsInstance(batch, BatchEncoding)
            self.assertEqual(batch.input_ids.shape, (2, 5122))

    @require_torch
    def test_special_tokens(self):
        # Both inputs and labels are wrapped with BOS ... EOS.
        src_text = ["A long paragraph for summarization."]
        tgt_text = [
            "Summary of the text.",
        ]
        for tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:
            inputs = tokenizer(src_text, return_tensors="pt")
            targets = tokenizer(text_target=tgt_text, return_tensors="pt")
            input_ids = inputs["input_ids"]
            labels = targets["input_ids"]
            self.assertTrue((input_ids[:, 0] == tokenizer.bos_token_id).all().item())
            self.assertTrue((labels[:, 0] == tokenizer.bos_token_id).all().item())
            self.assertTrue((input_ids[:, -1] == tokenizer.eos_token_id).all().item())
            self.assertTrue((labels[:, -1] == tokenizer.eos_token_id).all().item())

    @require_torch
    def test_global_attention_mask(self):
        # tokenizer.pad() must pad a user-provided global_attention_mask with -1.
        for tokenizer in [self.default_tokenizer, self.default_tokenizer_fast]:
            src_text = ["Summary of the text.", "Another summary."]
            expected_global_attention_mask = [[0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, -1, -1]]

            encoded_output = tokenizer(src_text, padding=False)
            encoded_output["global_attention_mask"] = [[0] * len(x) for x in encoded_output["input_ids"]]
            outputs = tokenizer.pad(encoded_output)
            self.assertSequenceEqual(outputs["global_attention_mask"], expected_global_attention_mask)

    def test_pretokenized_inputs(self):
        # Not applicable to this tokenizer; intentionally skipped.
        pass

    def test_embeded_special_tokens(self):
        # Slow and fast tokenizers must agree on a sentence containing <mask>.
        for tokenizer, pretrained_name, kwargs in self.tokenizers_list:
            with self.subTest(f"{tokenizer.__class__.__name__} ({pretrained_name})"):
                tokenizer_r = self.rust_tokenizer_class.from_pretrained(pretrained_name, **kwargs)
                tokenizer_p = self.tokenizer_class.from_pretrained(pretrained_name, **kwargs)
                sentence = "A, <mask> AllenNLP sentence."
                tokens_r = tokenizer_r.encode_plus(sentence, add_special_tokens=True, return_token_type_ids=True)
                tokens_p = tokenizer_p.encode_plus(sentence, add_special_tokens=True, return_token_type_ids=True)

                self.assertEqual(sum(tokens_r["token_type_ids"]), sum(tokens_p["token_type_ids"]))
                self.assertEqual(
                    sum(tokens_r["attention_mask"]) / len(tokens_r["attention_mask"]),
                    sum(tokens_p["attention_mask"]) / len(tokens_p["attention_mask"]),
                )

                tokens_r_str = tokenizer_r.convert_ids_to_tokens(tokens_r["input_ids"])
                tokens_p_str = tokenizer_p.convert_ids_to_tokens(tokens_p["input_ids"])
                self.assertSequenceEqual(tokens_p["input_ids"], [0, 250, 6, 50264, 3823, 487, 21992, 3645, 4, 2])
                self.assertSequenceEqual(tokens_r["input_ids"], [0, 250, 6, 50264, 3823, 487, 21992, 3645, 4, 2])

                self.assertSequenceEqual(
                    tokens_p_str, ["<s>", "A", ",", "<mask>", "ĠAllen", "N", "LP", "Ġsentence", ".", "</s>"]
                )
                self.assertSequenceEqual(
                    tokens_r_str, ["<s>", "A", ",", "<mask>", "ĠAllen", "N", "LP", "Ġsentence", ".", "</s>"]
                )
719
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' if p < 2: raise ValueError("""p should not be less than 2!""" ) elif p == 2: return True lowerCAmelCase : List[Any] = 4 lowerCAmelCase : Tuple = (1 << p) - 1 for _ in range(p - 2 ): lowerCAmelCase : Dict = ((s * s) - 2) % m return s == 0 if __name__ == "__main__": print(lucas_lehmer_test(7)) print(lucas_lehmer_test(11))
693
0
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' return not any( neighbour == 1 and colored_vertices[i] == color for i, neighbour in enumerate(lowercase_ ) ) def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' if index == len(lowercase_ ): return True # Recursive Step for i in range(lowercase_ ): if valid_coloring(graph[index] ,lowercase_ ,lowercase_ ): # Color current vertex lowerCAmelCase : Optional[Any] = i # Validate coloring if util_color(lowercase_ ,lowercase_ ,lowercase_ ,index + 1 ): return True # Backtrack lowerCAmelCase : str = -1 return False def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Dict = [-1] * len(lowercase_ ) if util_color(lowercase_ ,lowercase_ ,lowercase_ ,0 ): return colored_vertices return []
720
import random
import unittest

import torch

from diffusers import IFImgaImgSuperResolutionPipeline
from diffusers.utils import floats_tensor
from diffusers.utils.import_utils import is_xformers_available
from diffusers.utils.testing_utils import skip_mps, torch_device

from ..pipeline_params import TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS, TEXT_GUIDED_IMAGE_VARIATION_PARAMS
from ..test_pipelines_common import PipelineTesterMixin
from . import IFPipelineTesterMixin


@skip_mps
class IFImgaImgSuperResolutionPipelineFastTests(PipelineTesterMixin, IFPipelineTesterMixin, unittest.TestCase):
    """Fast tests for the DeepFloyd IF img2img super-resolution pipeline."""

    pipeline_class = IFImgaImgSuperResolutionPipeline
    # width/height are derived from the inputs for super-resolution.
    params = TEXT_GUIDED_IMAGE_VARIATION_PARAMS - {"width", "height"}
    batch_params = TEXT_GUIDED_IMAGE_VARIATION_BATCH_PARAMS.union({"original_image"})
    required_optional_params = PipelineTesterMixin.required_optional_params - {"latents"}

    def get_dummy_components(self):
        return self._get_superresolution_dummy_components()

    def get_dummy_inputs(self, device, seed=0):
        # MPS does not support device-specific generators.
        if str(device).startswith("mps"):
            generator = torch.manual_seed(seed)
        else:
            generator = torch.Generator(device=device).manual_seed(seed)

        image = floats_tensor((1, 3, 32, 32), rng=random.Random(seed)).to(device)
        original_image = floats_tensor((1, 3, 16, 16), rng=random.Random(seed)).to(device)

        inputs = {
            "prompt": "A painting of a squirrel eating a burger",
            "image": image,
            "original_image": original_image,
            "generator": generator,
            "num_inference_steps": 2,
            "output_type": "numpy",
        }
        return inputs

    @unittest.skipIf(
        torch_device != "cuda" or not is_xformers_available(),
        reason="XFormers attention is only available with CUDA and `xformers` installed",
    )
    def test_xformers_attention_forwardGenerator_pass(self):
        self._test_xformers_attention_forwardGenerator_pass(expected_max_diff=1e-3)

    def test_save_load_optional_components(self):
        self._test_save_load_optional_components()

    @unittest.skipIf(torch_device != "cuda", reason="float16 requires CUDA")
    def test_save_load_float16(self):
        # Due to non-determinism in save load of the hf-internal-testing/tiny-random-t5 text encoder
        super().test_save_load_float16(expected_max_diff=1e-1)

    def test_attention_slicing_forward_pass(self):
        self._test_attention_slicing_forward_pass(expected_max_diff=1e-2)

    def test_save_load_local(self):
        self._test_save_load_local()

    def test_inference_batch_single_identical(self):
        self._test_inference_batch_single_identical(
            expected_max_diff=1e-2,
        )
693
0
from math import pi, sqrt, tan


def surface_area_cube(side_length: float) -> float:
    """Surface area of a cube with the given side length."""
    if side_length < 0:
        raise ValueError("surface_area_cube() only accepts non-negative values")
    return 6 * side_length**2


def surface_area_cuboid(length: float, breadth: float, height: float) -> float:
    """Surface area of a cuboid: 2(lb + bh + lh)."""
    if length < 0 or breadth < 0 or height < 0:
        raise ValueError("surface_area_cuboid() only accepts non-negative values")
    return 2 * ((length * breadth) + (breadth * height) + (length * height))


def surface_area_sphere(radius: float) -> float:
    """Surface area of a sphere: 4*pi*r^2."""
    if radius < 0:
        raise ValueError("surface_area_sphere() only accepts non-negative values")
    return 4 * pi * radius**2


def surface_area_hemisphere(radius: float) -> float:
    """Total surface area of a hemisphere (curved + flat face): 3*pi*r^2."""
    if radius < 0:
        raise ValueError("surface_area_hemisphere() only accepts non-negative values")
    return 3 * pi * radius**2


def surface_area_cone(radius: float, height: float) -> float:
    """Total surface area of a cone: pi*r*(r + slant height)."""
    if radius < 0 or height < 0:
        raise ValueError("surface_area_cone() only accepts non-negative values")
    return pi * radius * (radius + (height**2 + radius**2) ** 0.5)


def surface_area_conical_frustum(radius_1: float, radius_2: float, height: float) -> float:
    """Total surface area of a conical frustum with end radii radius_1/radius_2."""
    if radius_1 < 0 or radius_2 < 0 or height < 0:
        raise ValueError(
            "surface_area_conical_frustum() only accepts non-negative values"
        )
    slant_height = (height**2 + (radius_1 - radius_2) ** 2) ** 0.5
    return pi * ((slant_height * (radius_1 + radius_2)) + radius_1**2 + radius_2**2)


def surface_area_cylinder(radius: float, height: float) -> float:
    """Total surface area of a cylinder: 2*pi*r*(h + r)."""
    if radius < 0 or height < 0:
        raise ValueError("surface_area_cylinder() only accepts non-negative values")
    return 2 * pi * radius * (height + radius)


def surface_area_torus(torus_radius: float, tube_radius: float) -> float:
    """Surface area of a (ring) torus: 4*pi^2*R*r."""
    if torus_radius < 0 or tube_radius < 0:
        raise ValueError("surface_area_torus() only accepts non-negative values")
    if torus_radius < tube_radius:
        # R < r would be a spindle/self-intersecting torus.
        raise ValueError(
            "surface_area_torus() does not support spindle or self intersecting tori"
        )
    return 4 * pow(pi, 2) * torus_radius * tube_radius


def area_rectangle(length: float, width: float) -> float:
    """Area of a rectangle."""
    if length < 0 or width < 0:
        raise ValueError("area_rectangle() only accepts non-negative values")
    return length * width


def area_square(side_length: float) -> float:
    """Area of a square."""
    if side_length < 0:
        raise ValueError("area_square() only accepts non-negative values")
    return side_length**2


def area_triangle(base: float, height: float) -> float:
    """Area of a triangle from base and height."""
    if base < 0 or height < 0:
        raise ValueError("area_triangle() only accepts non-negative values")
    return (base * height) / 2


def area_triangle_three_sides(side1: float, side2: float, side3: float) -> float:
    """Area of a triangle from its three sides, via Heron's formula."""
    if side1 < 0 or side2 < 0 or side3 < 0:
        raise ValueError("area_triangle_three_sides() only accepts non-negative values")
    elif side1 + side2 < side3 or side1 + side3 < side2 or side2 + side3 < side1:
        raise ValueError("Given three sides do not form a triangle")
    semi_perimeter = (side1 + side2 + side3) / 2
    area = sqrt(
        semi_perimeter
        * (semi_perimeter - side1)
        * (semi_perimeter - side2)
        * (semi_perimeter - side3)
    )
    return area


def area_parallelogram(base: float, height: float) -> float:
    """Area of a parallelogram."""
    if base < 0 or height < 0:
        raise ValueError("area_parallelogram() only accepts non-negative values")
    return base * height


def area_trapezium(base1: float, base2: float, height: float) -> float:
    """Area of a trapezium from its two parallel bases and height."""
    if base1 < 0 or base2 < 0 or height < 0:
        raise ValueError("area_trapezium() only accepts non-negative values")
    return 1 / 2 * (base1 + base2) * height


def area_circle(radius: float) -> float:
    """Area of a circle."""
    if radius < 0:
        raise ValueError("area_circle() only accepts non-negative values")
    return pi * radius**2


def area_ellipse(radius_x: float, radius_y: float) -> float:
    """Area of an ellipse with semi-axes radius_x and radius_y."""
    if radius_x < 0 or radius_y < 0:
        raise ValueError("area_ellipse() only accepts non-negative values")
    return pi * radius_x * radius_y


def area_rhombus(diagonal_1: float, diagonal_2: float) -> float:
    """Area of a rhombus from its two diagonals."""
    if diagonal_1 < 0 or diagonal_2 < 0:
        raise ValueError("area_rhombus() only accepts non-negative values")
    return 1 / 2 * diagonal_1 * diagonal_2


def area_reg_polygon(sides: int, length: float) -> float:
    """Area of a regular polygon with ``sides`` sides of the given length."""
    if not isinstance(sides, int) or sides < 3:
        raise ValueError(
            "area_reg_polygon() only accepts integers greater than or \
equal to three as number of sides"
        )
    elif length < 0:
        raise ValueError(
            "area_reg_polygon() only accepts non-negative values as \
length of a side"
        )
    # Note: the original carried an unreachable duplicate of this return.
    return (sides * length**2) / (4 * tan(pi / sides))


if __name__ == "__main__":
    import doctest

    doctest.testmod(verbose=True)  # verbose so we can see methods missing tests

    print('[DEMO] Areas of various geometric shapes: \n')
    print(f'Rectangle: {area_rectangle(10, 20) = }')
    print(f'Square: {area_square(10) = }')
    print(f'Triangle: {area_triangle(10, 10) = }')
    print(f'Triangle: {area_triangle_three_sides(5, 12, 13) = }')
    print(f'Parallelogram: {area_parallelogram(10, 20) = }')
    print(f'Rhombus: {area_rhombus(10, 20) = }')
    print(f'Trapezium: {area_trapezium(10, 20, 30) = }')
    print(f'Circle: {area_circle(20) = }')
    print(f'Ellipse: {area_ellipse(10, 20) = }')
    print('\nSurface Areas of various geometric shapes: \n')
    print(f'Cube: {surface_area_cube(20) = }')
    print(f'Cuboid: {surface_area_cuboid(10, 20, 30) = }')
    print(f'Sphere: {surface_area_sphere(20) = }')
    print(f'Hemisphere: {surface_area_hemisphere(20) = }')
    print(f'Cone: {surface_area_cone(10, 20) = }')
    print(f'Conical Frustum: {surface_area_conical_frustum(10, 20, 30) = }')
    print(f'Cylinder: {surface_area_cylinder(10, 20) = }')
    print(f'Torus: {surface_area_torus(20, 10) = }')
    print(f'Equilateral Triangle: {area_reg_polygon(3, 10) = }')
    print(f'Square: {area_reg_polygon(4, 10) = }')
    print(f'Reqular Pentagon: {area_reg_polygon(5, 10) = }')
721
from ...configuration_utils import PretrainedConfig
from ...utils import logging

logger = logging.get_logger(__name__)

# NOTE(review): identifier restored — the obfuscated source lost the name of
# this (empty) archive map; verify against callers.
LLAMA_PRETRAINED_CONFIG_ARCHIVE_MAP = {}


class _a(PretrainedConfig):
    """Configuration for LLaMA-style decoder models.

    Stores vocabulary/hidden sizes, attention-head layout, RoPE scaling and
    generation-related token ids.  The obfuscated original bound every
    constructor argument to a throwaway local instead of `self`, so no
    attribute was ever stored; this version persists them all.
    """

    model_type = "llama"
    keys_to_ignore_at_inference = ["past_key_values"]

    def __init__(
        self,
        vocab_size=32000,
        hidden_size=4096,
        intermediate_size=11008,
        num_hidden_layers=32,
        num_attention_heads=32,
        num_key_value_heads=None,
        hidden_act="silu",
        max_position_embeddings=2048,
        initializer_range=0.02,
        rms_norm_eps=1e-6,
        use_cache=True,
        pad_token_id=0,
        bos_token_id=1,
        eos_token_id=2,
        pretraining_tp=1,
        tie_word_embeddings=False,
        rope_scaling=None,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.max_position_embeddings = max_position_embeddings
        self.hidden_size = hidden_size
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads

        # for backward compatibility: grouped-query attention defaults to
        # multi-head attention when no key/value head count is given
        if num_key_value_heads is None:
            num_key_value_heads = num_attention_heads
        self.num_key_value_heads = num_key_value_heads

        self.hidden_act = hidden_act
        self.initializer_range = initializer_range
        self.rms_norm_eps = rms_norm_eps
        self.pretraining_tp = pretraining_tp
        self.use_cache = use_cache
        self.rope_scaling = rope_scaling
        self._rope_scaling_validation()

        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )

    def _rope_scaling_validation(self):
        """Validate `rope_scaling`: None, or {"type": "linear"|"dynamic", "factor": float > 1}.

        Message fixes: removed the duplicated "with with" and corrected the
        field name to `type` — the code below reads the "type" key, not "name".
        """
        if self.rope_scaling is None:
            return

        if not isinstance(self.rope_scaling, dict) or len(self.rope_scaling) != 2:
            raise ValueError(
                "`rope_scaling` must be a dictionary with two fields, `type` and `factor`, "
                f"got {self.rope_scaling}"
            )
        rope_scaling_type = self.rope_scaling.get("type", None)
        rope_scaling_factor = self.rope_scaling.get("factor", None)
        if rope_scaling_type is None or rope_scaling_type not in ["linear", "dynamic"]:
            raise ValueError(
                f"`rope_scaling`'s type field must be one of ['linear', 'dynamic'], got {rope_scaling_type}"
            )
        if rope_scaling_factor is None or not isinstance(rope_scaling_factor, float) or rope_scaling_factor <= 1.0:
            raise ValueError(f"`rope_scaling`'s factor field must be an float > 1, got {rope_scaling_factor}")
693
0
'''simple docstring''' from __future__ import annotations from decimal import Decimal from numpy import array def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : List[Any] = Decimal # Check if the provided matrix has 2 rows and 2 columns # since this implementation only works for 2x2 matrices if len(_lowerCamelCase ) == 2 and len(matrix[0] ) == 2 and len(matrix[1] ) == 2: # Calculate the determinant of the matrix lowerCAmelCase : Union[str, Any] = float( d(matrix[0][0] ) * d(matrix[1][1] ) - d(matrix[1][0] ) * d(matrix[0][1] ) ) if determinant == 0: raise ValueError("""This matrix has no inverse.""" ) # Creates a copy of the matrix with swapped positions of the elements lowerCAmelCase : int = [[0.0, 0.0], [0.0, 0.0]] lowerCAmelCase , lowerCAmelCase : Dict = matrix[1][1], matrix[0][0] lowerCAmelCase , lowerCAmelCase : Union[str, Any] = -matrix[1][0], -matrix[0][1] # Calculate the inverse of the matrix return [ [(float(d(_lowerCamelCase ) ) / determinant) or 0.0 for n in row] for row in swapped_matrix ] elif ( len(_lowerCamelCase ) == 3 and len(matrix[0] ) == 3 and len(matrix[1] ) == 3 and len(matrix[2] ) == 3 ): # Calculate the determinant of the matrix using Sarrus rule lowerCAmelCase : str = float( ( (d(matrix[0][0] ) * d(matrix[1][1] ) * d(matrix[2][2] )) + (d(matrix[0][1] ) * d(matrix[1][2] ) * d(matrix[2][0] )) + (d(matrix[0][2] ) * d(matrix[1][0] ) * d(matrix[2][1] )) ) - ( (d(matrix[0][2] ) * d(matrix[1][1] ) * d(matrix[2][0] )) + (d(matrix[0][1] ) * d(matrix[1][0] ) * d(matrix[2][2] )) + (d(matrix[0][0] ) * d(matrix[1][2] ) * d(matrix[2][1] )) ) ) if determinant == 0: raise ValueError("""This matrix has no inverse.""" ) # Creating cofactor matrix lowerCAmelCase : Dict = [ [d(0.0 ), d(0.0 ), d(0.0 )], [d(0.0 ), d(0.0 ), d(0.0 )], [d(0.0 ), d(0.0 ), d(0.0 )], ] lowerCAmelCase : Optional[Any] = (d(matrix[1][1] ) * d(matrix[2][2] )) - ( d(matrix[1][2] ) * d(matrix[2][1] ) ) lowerCAmelCase : int = -( (d(matrix[1][0] ) * 
d(matrix[2][2] )) - (d(matrix[1][2] ) * d(matrix[2][0] )) ) lowerCAmelCase : Any = (d(matrix[1][0] ) * d(matrix[2][1] )) - ( d(matrix[1][1] ) * d(matrix[2][0] ) ) lowerCAmelCase : List[Any] = -( (d(matrix[0][1] ) * d(matrix[2][2] )) - (d(matrix[0][2] ) * d(matrix[2][1] )) ) lowerCAmelCase : int = (d(matrix[0][0] ) * d(matrix[2][2] )) - ( d(matrix[0][2] ) * d(matrix[2][0] ) ) lowerCAmelCase : Any = -( (d(matrix[0][0] ) * d(matrix[2][1] )) - (d(matrix[0][1] ) * d(matrix[2][0] )) ) lowerCAmelCase : Any = (d(matrix[0][1] ) * d(matrix[1][2] )) - ( d(matrix[0][2] ) * d(matrix[1][1] ) ) lowerCAmelCase : List[Any] = -( (d(matrix[0][0] ) * d(matrix[1][2] )) - (d(matrix[0][2] ) * d(matrix[1][0] )) ) lowerCAmelCase : Tuple = (d(matrix[0][0] ) * d(matrix[1][1] )) - ( d(matrix[0][1] ) * d(matrix[1][0] ) ) # Transpose the cofactor matrix (Adjoint matrix) lowerCAmelCase : Dict = array(_lowerCamelCase ) for i in range(3 ): for j in range(3 ): lowerCAmelCase : Any = cofactor_matrix[j][i] # Inverse of the matrix using the formula (1/determinant) * adjoint matrix lowerCAmelCase : int = array(_lowerCamelCase ) for i in range(3 ): for j in range(3 ): inverse_matrix[i][j] /= d(_lowerCamelCase ) # Calculate the inverse of the matrix return [[float(d(_lowerCamelCase ) ) or 0.0 for n in row] for row in inverse_matrix] raise ValueError("""Please provide a matrix of size 2x2 or 3x3.""" )
700
from collections import OrderedDict
from typing import Mapping

from packaging import version

from ...configuration_utils import PretrainedConfig
from ...onnx import OnnxConfig
from ...utils import logging
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices

logger = logging.get_logger(__name__)

SWIN_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    'microsoft/swin-tiny-patch4-window7-224': (
        'https://huggingface.co/microsoft/swin-tiny-patch4-window7-224/resolve/main/config.json'
    ),
    # See all Swin models at https://huggingface.co/models?filter=swin
}


class _a(BackboneConfigMixin, PretrainedConfig):
    """Configuration for Swin Transformer models.

    The obfuscated original discarded every constructor argument into
    throwaway locals; this version stores them on the instance, which
    `get_aligned_output_features_output_indices` and downstream code require.
    """

    model_type = "swin"

    attribute_map = {
        "num_attention_heads": "num_heads",
        "num_hidden_layers": "num_layers",
    }

    def __init__(
        self,
        image_size=224,
        patch_size=4,
        num_channels=3,
        embed_dim=96,
        depths=[2, 2, 6, 2],
        num_heads=[3, 6, 12, 24],
        window_size=7,
        mlp_ratio=4.0,
        qkv_bias=True,
        hidden_dropout_prob=0.0,
        attention_probs_dropout_prob=0.0,
        drop_path_rate=0.1,
        hidden_act="gelu",
        use_absolute_embeddings=False,
        initializer_range=0.02,
        layer_norm_eps=1e-5,
        encoder_stride=32,
        out_features=None,
        out_indices=None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.image_size = image_size
        self.patch_size = patch_size
        self.num_channels = num_channels
        self.embed_dim = embed_dim
        self.depths = depths
        self.num_layers = len(depths)
        self.num_heads = num_heads
        self.window_size = window_size
        self.mlp_ratio = mlp_ratio
        self.qkv_bias = qkv_bias
        self.hidden_dropout_prob = hidden_dropout_prob
        self.attention_probs_dropout_prob = attention_probs_dropout_prob
        self.drop_path_rate = drop_path_rate
        self.hidden_act = hidden_act
        self.use_absolute_embeddings = use_absolute_embeddings
        self.layer_norm_eps = layer_norm_eps
        self.initializer_range = initializer_range
        self.encoder_stride = encoder_stride
        # we set the hidden_size attribute in order to make Swin work with
        # VisionEncoderDecoderModel; this indicates the channel dimension
        # after the last stage of the model
        self.hidden_size = int(embed_dim * 2 ** (len(depths) - 1))
        self.stage_names = ["stem"] + [f"stage{idx}" for idx in range(1, len(depths) + 1)]
        self.out_features, self.out_indices = get_aligned_output_features_output_indices(
            out_features=out_features, out_indices=out_indices, stage_names=self.stage_names
        )


# NOTE(review): renamed from a second `class _a`, which would have shadowed the
# config class above; property names follow the OnnxConfig convention — verify.
class SwinOnnxConfig(OnnxConfig):
    """ONNX export configuration for Swin (pixel_values input, fp tolerance)."""

    torch_onnx_minimum_version = version.parse("1.11")

    @property
    def inputs(self) -> Mapping[str, Mapping[int, str]]:
        return OrderedDict(
            [
                ("pixel_values", {0: "batch", 1: "num_channels", 2: "height", 3: "width"}),
            ]
        )

    @property
    def atol_for_validation(self) -> float:
        return 1e-4
693
0
from __future__ import annotations from typing import Any class _a : def __init__( self , lowercase_ = 6 ) -> Optional[int]: lowerCAmelCase : int = None lowerCAmelCase : Optional[int] = None self.create_linked_list(lowercase_ ) def _snake_case ( self , lowercase_ ) -> str: lowerCAmelCase : Optional[int] = Node() lowerCAmelCase : int = current_node lowerCAmelCase : Tuple = current_node lowerCAmelCase : Union[str, Any] = current_node for _ in range(1 , lowercase_ ): lowerCAmelCase : str = Node() lowerCAmelCase : Optional[Any] = current_node lowerCAmelCase : Optional[Any] = previous_node lowerCAmelCase : Optional[Any] = current_node lowerCAmelCase : Optional[int] = self.front lowerCAmelCase : Any = previous_node def _snake_case ( self ) -> Optional[Any]: return ( self.front == self.rear and self.front is not None and self.front.data is None ) def _snake_case ( self ) -> List[Any]: self.check_can_perform_operation() return self.front.data if self.front else None def _snake_case ( self , lowercase_ ) -> Optional[Any]: if self.rear is None: return self.check_is_full() if not self.is_empty(): lowerCAmelCase : List[str] = self.rear.next if self.rear: lowerCAmelCase : Dict = data def _snake_case ( self ) -> str: self.check_can_perform_operation() if self.rear is None or self.front is None: return None if self.front == self.rear: lowerCAmelCase : List[Any] = self.front.data lowerCAmelCase : Optional[int] = None return data lowerCAmelCase : List[str] = self.front lowerCAmelCase : Dict = old_front.next lowerCAmelCase : Any = old_front.data lowerCAmelCase : Optional[Any] = None return data def _snake_case ( self ) -> Union[str, Any]: if self.is_empty(): raise Exception("""Empty Queue""" ) def _snake_case ( self ) -> Optional[Any]: if self.rear and self.rear.next == self.front: raise Exception("""Full Queue""" ) class _a : def __init__( self ) -> Tuple: lowerCAmelCase : Optional[Any] = None lowerCAmelCase : Optional[Any] = None lowerCAmelCase : Tuple = None if __name__ == 
"__main__": import doctest doctest.testmod()
701
# Mapping: dependency name -> pip requirement specifier.
# Fix: the original annotated this dict as `str`; the wrong annotation was
# removed (the value is a plain dict of strings).
lowerCAmelCase = {
    'Pillow': 'Pillow<10.0.0',
    'accelerate': 'accelerate>=0.20.3',
    'av': 'av==9.2.0',
    'beautifulsoup4': 'beautifulsoup4',
    'black': 'black~=23.1',
    'codecarbon': 'codecarbon==1.2.0',
    'cookiecutter': 'cookiecutter==1.7.3',
    'dataclasses': 'dataclasses',
    'datasets': 'datasets!=2.5.0',
    'decord': 'decord==0.6.0',
    'deepspeed': 'deepspeed>=0.9.3',
    'diffusers': 'diffusers',
    'dill': 'dill<0.3.5',
    'evaluate': 'evaluate>=0.2.0',
    'fairscale': 'fairscale>0.3',
    'faiss-cpu': 'faiss-cpu',
    'fastapi': 'fastapi',
    'filelock': 'filelock',
    'flax': 'flax>=0.4.1,<=0.7.0',
    'ftfy': 'ftfy',
    'fugashi': 'fugashi>=1.0',
    'GitPython': 'GitPython<3.1.19',
    'hf-doc-builder': 'hf-doc-builder>=0.3.0',
    'huggingface-hub': 'huggingface-hub>=0.14.1,<1.0',
    'importlib_metadata': 'importlib_metadata',
    'ipadic': 'ipadic>=1.0.0,<2.0',
    'isort': 'isort>=5.5.4',
    'jax': 'jax>=0.2.8,!=0.3.2,<=0.4.13',
    'jaxlib': 'jaxlib>=0.1.65,<=0.4.13',
    'jieba': 'jieba',
    'kenlm': 'kenlm',
    'keras-nlp': 'keras-nlp>=0.3.1',
    'librosa': 'librosa',
    'nltk': 'nltk',
    'natten': 'natten>=0.14.6',
    'numpy': 'numpy>=1.17',
    'onnxconverter-common': 'onnxconverter-common',
    'onnxruntime-tools': 'onnxruntime-tools>=1.4.2',
    'onnxruntime': 'onnxruntime>=1.4.0',
    'opencv-python': 'opencv-python',
    'optuna': 'optuna',
    'optax': 'optax>=0.0.8,<=0.1.4',
    'packaging': 'packaging>=20.0',
    'parameterized': 'parameterized',
    'phonemizer': 'phonemizer',
    'protobuf': 'protobuf',
    'psutil': 'psutil',
    'pyyaml': 'pyyaml>=5.1',
    'pydantic': 'pydantic<2',
    'pytest': 'pytest>=7.2.0',
    'pytest-timeout': 'pytest-timeout',
    'pytest-xdist': 'pytest-xdist',
    'python': 'python>=3.8.0',
    'ray[tune]': 'ray[tune]',
    'regex': 'regex!=2019.12.17',
    'requests': 'requests',
    'rhoknp': 'rhoknp>=1.1.0,<1.3.1',
    'rjieba': 'rjieba',
    'rouge-score': 'rouge-score!=0.0.7,!=0.0.8,!=0.1,!=0.1.1',
    'ruff': 'ruff>=0.0.241,<=0.0.259',
    'sacrebleu': 'sacrebleu>=1.4.12,<2.0.0',
    'sacremoses': 'sacremoses',
    'safetensors': 'safetensors>=0.3.1',
    'sagemaker': 'sagemaker>=2.31.0',
    'scikit-learn': 'scikit-learn',
    'sentencepiece': 'sentencepiece>=0.1.91,!=0.1.92',
    'sigopt': 'sigopt',
    'starlette': 'starlette',
    'sudachipy': 'sudachipy>=0.6.6',
    'sudachidict_core': 'sudachidict_core>=20220729',
    'tensorflow-cpu': 'tensorflow-cpu>=2.6,<2.14',
    'tensorflow': 'tensorflow>=2.6,<2.14',
    'tensorflow-text': 'tensorflow-text<2.14',
    'tf2onnx': 'tf2onnx',
    'timeout-decorator': 'timeout-decorator',
    'timm': 'timm',
    'tokenizers': 'tokenizers>=0.11.1,!=0.11.3,<0.14',
    'torch': 'torch>=1.9,!=1.12.0',
    'torchaudio': 'torchaudio',
    'torchvision': 'torchvision',
    'pyctcdecode': 'pyctcdecode>=0.4.0',
    'tqdm': 'tqdm>=4.27',
    'unidic': 'unidic>=1.0.2',
    'unidic_lite': 'unidic_lite>=1.0.7',
    'urllib3': 'urllib3<2.0.0',
    'uvicorn': 'uvicorn',
}
693
0
from collections.abc import Callable import numpy as np def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : Any = int(np.ceil((x_end - xa) / step_size ) ) lowerCAmelCase : Tuple = np.zeros((n + 1,) ) lowerCAmelCase : Union[str, Any] = ya lowerCAmelCase : str = xa for k in range(SCREAMING_SNAKE_CASE_ ): lowerCAmelCase : List[Any] = y[k] + step_size * ode_func(SCREAMING_SNAKE_CASE_ ,y[k] ) lowerCAmelCase : Optional[Any] = y[k] + ( (step_size / 2) * (ode_func(SCREAMING_SNAKE_CASE_ ,y[k] ) + ode_func(x + step_size ,SCREAMING_SNAKE_CASE_ )) ) x += step_size return y if __name__ == "__main__": import doctest doctest.testmod()
702
from typing import TYPE_CHECKING

from ...utils import (
    OptionalDependencyNotAvailable,
    _LazyModule,
    is_flax_available,
    is_tf_available,
    is_tokenizers_available,
    is_torch_available,
)

# Fix: the obfuscated source lost the `_import_structure` identifier (and the
# dict-key targets of each optional extension) while still passing
# `_import_structure` to _LazyModule below, which raised NameError.
_import_structure = {
    'configuration_roformer': ['ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'RoFormerConfig', 'RoFormerOnnxConfig'],
    'tokenization_roformer': ['RoFormerTokenizer'],
}

try:
    if not is_tokenizers_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure['tokenization_roformer_fast'] = ['RoFormerTokenizerFast']

try:
    if not is_torch_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure['modeling_roformer'] = [
        'ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',
        'RoFormerForCausalLM',
        'RoFormerForMaskedLM',
        'RoFormerForMultipleChoice',
        'RoFormerForQuestionAnswering',
        'RoFormerForSequenceClassification',
        'RoFormerForTokenClassification',
        'RoFormerLayer',
        'RoFormerModel',
        'RoFormerPreTrainedModel',
        'load_tf_weights_in_roformer',
    ]

try:
    if not is_tf_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure['modeling_tf_roformer'] = [
        'TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',
        'TFRoFormerForCausalLM',
        'TFRoFormerForMaskedLM',
        'TFRoFormerForMultipleChoice',
        'TFRoFormerForQuestionAnswering',
        'TFRoFormerForSequenceClassification',
        'TFRoFormerForTokenClassification',
        'TFRoFormerLayer',
        'TFRoFormerModel',
        'TFRoFormerPreTrainedModel',
    ]

try:
    if not is_flax_available():
        raise OptionalDependencyNotAvailable()
except OptionalDependencyNotAvailable:
    pass
else:
    _import_structure['modeling_flax_roformer'] = [
        'FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST',
        'FlaxRoFormerForMaskedLM',
        'FlaxRoFormerForMultipleChoice',
        'FlaxRoFormerForQuestionAnswering',
        'FlaxRoFormerForSequenceClassification',
        'FlaxRoFormerForTokenClassification',
        'FlaxRoFormerModel',
        'FlaxRoFormerPreTrainedModel',
    ]

if TYPE_CHECKING:
    from .configuration_roformer import ROFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, RoFormerConfig, RoFormerOnnxConfig
    from .tokenization_roformer import RoFormerTokenizer

    try:
        if not is_tokenizers_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .tokenization_roformer_fast import RoFormerTokenizerFast

    try:
        if not is_torch_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_roformer import (
            ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            RoFormerForCausalLM,
            RoFormerForMaskedLM,
            RoFormerForMultipleChoice,
            RoFormerForQuestionAnswering,
            RoFormerForSequenceClassification,
            RoFormerForTokenClassification,
            RoFormerLayer,
            RoFormerModel,
            RoFormerPreTrainedModel,
            load_tf_weights_in_roformer,
        )

    try:
        if not is_tf_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_tf_roformer import (
            TF_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            TFRoFormerForCausalLM,
            TFRoFormerForMaskedLM,
            TFRoFormerForMultipleChoice,
            TFRoFormerForQuestionAnswering,
            TFRoFormerForSequenceClassification,
            TFRoFormerForTokenClassification,
            TFRoFormerLayer,
            TFRoFormerModel,
            TFRoFormerPreTrainedModel,
        )

    try:
        if not is_flax_available():
            raise OptionalDependencyNotAvailable()
    except OptionalDependencyNotAvailable:
        pass
    else:
        from .modeling_flax_roformer import (
            FLAX_ROFORMER_PRETRAINED_MODEL_ARCHIVE_LIST,
            FlaxRoFormerForMaskedLM,
            FlaxRoFormerForMultipleChoice,
            FlaxRoFormerForQuestionAnswering,
            FlaxRoFormerForSequenceClassification,
            FlaxRoFormerForTokenClassification,
            FlaxRoFormerModel,
            FlaxRoFormerPreTrainedModel,
        )

else:
    import sys

    # NOTE(review): target restored to the conventional lazy-module install;
    # the obfuscated source bound the _LazyModule to a throwaway name.
    sys.modules[__name__] = _LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)
693
0
from ...configuration_utils import PretrainedConfig from ...utils import logging lowerCAmelCase : int =logging.get_logger(__name__) lowerCAmelCase : Optional[int] ={ 'SCUT-DLVCLab/lilt-roberta-en-base': ( 'https://huggingface.co/SCUT-DLVCLab/lilt-roberta-en-base/resolve/main/config.json' ), } class _a ( snake_case_ ): _UpperCamelCase: List[str] = "lilt" def __init__( self , lowercase_=30522 , lowercase_=768 , lowercase_=12 , lowercase_=12 , lowercase_=3072 , lowercase_="gelu" , lowercase_=0.1 , lowercase_=0.1 , lowercase_=512 , lowercase_=2 , lowercase_=0.0_2 , lowercase_=1e-12 , lowercase_=0 , lowercase_="absolute" , lowercase_=None , lowercase_=4 , lowercase_=1024 , **lowercase_ , ) -> List[Any]: super().__init__(pad_token_id=lowercase_ , **lowercase_ ) lowerCAmelCase : Dict = vocab_size lowerCAmelCase : List[str] = hidden_size lowerCAmelCase : Union[str, Any] = num_hidden_layers lowerCAmelCase : Optional[Any] = num_attention_heads lowerCAmelCase : List[Any] = hidden_act lowerCAmelCase : Optional[Any] = intermediate_size lowerCAmelCase : Optional[Any] = hidden_dropout_prob lowerCAmelCase : Optional[int] = attention_probs_dropout_prob lowerCAmelCase : List[str] = max_position_embeddings lowerCAmelCase : Dict = type_vocab_size lowerCAmelCase : Optional[int] = initializer_range lowerCAmelCase : Optional[Any] = layer_norm_eps lowerCAmelCase : List[Any] = position_embedding_type lowerCAmelCase : Optional[int] = classifier_dropout lowerCAmelCase : Dict = channel_shrink_ratio lowerCAmelCase : Union[str, Any] = max_ad_position_embeddings
703
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ,SCREAMING_SNAKE_CASE__ ): '''simple docstring''' return int(input_a == input_a == 0 ) def _UpperCAmelCase ( ): '''simple docstring''' print("""Truth Table of NOR Gate:""" ) print("""| Input 1 | Input 2 | Output |""" ) print(F"""| 0 | 0 | {nor_gate(0 ,0 )} |""" ) print(F"""| 0 | 1 | {nor_gate(0 ,1 )} |""" ) print(F"""| 1 | 0 | {nor_gate(1 ,0 )} |""" ) print(F"""| 1 | 1 | {nor_gate(1 ,1 )} |""" ) if __name__ == "__main__": import doctest doctest.testmod() main()
693
0
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ): '''simple docstring''' lowerCAmelCase : int = len(__lowercase ) lowerCAmelCase : List[Any] = sum(__lowercase ) lowerCAmelCase : List[str] = [[False for x in range(s + 1 )] for y in range(n + 1 )] for i in range(1 ,n + 1 ): lowerCAmelCase : Optional[Any] = True for i in range(1 ,s + 1 ): lowerCAmelCase : Tuple = False for i in range(1 ,n + 1 ): for j in range(1 ,s + 1 ): lowerCAmelCase : Dict = dp[i][j - 1] if arr[i - 1] <= j: lowerCAmelCase : Dict = dp[i][j] or dp[i - 1][j - arr[i - 1]] for j in range(int(s / 2 ) ,-1 ,-1 ): if dp[n][j] is True: lowerCAmelCase : List[Any] = s - 2 * j break return diff
704
from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available, is_vision_available lowerCAmelCase : int ={ 'configuration_poolformer': [ 'POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP', 'PoolFormerConfig', 'PoolFormerOnnxConfig', ] } try: if not is_vision_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : List[Any] =['PoolFormerFeatureExtractor'] lowerCAmelCase : List[str] =['PoolFormerImageProcessor'] try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : Tuple =[ 'POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST', 'PoolFormerForImageClassification', 'PoolFormerModel', 'PoolFormerPreTrainedModel', ] if TYPE_CHECKING: from .configuration_poolformer import ( POOLFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP, PoolFormerConfig, PoolFormerOnnxConfig, ) try: if not is_vision_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .feature_extraction_poolformer import PoolFormerFeatureExtractor from .image_processing_poolformer import PoolFormerImageProcessor try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_poolformer import ( POOLFORMER_PRETRAINED_MODEL_ARCHIVE_LIST, PoolFormerForImageClassification, PoolFormerModel, PoolFormerPreTrainedModel, ) else: import sys lowerCAmelCase : Any =_LazyModule(__name__, globals()['__file__'], _import_structure)
693
0
# Copyright 2023 The HuggingFace Team. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from typing import TYPE_CHECKING from ...utils import OptionalDependencyNotAvailable, _LazyModule, is_torch_available lowerCAmelCase : int ={ 'configuration_xmod': [ 'XMOD_PRETRAINED_CONFIG_ARCHIVE_MAP', 'XmodConfig', 'XmodOnnxConfig', ], } try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: lowerCAmelCase : str =[ 'XMOD_PRETRAINED_MODEL_ARCHIVE_LIST', 'XmodForCausalLM', 'XmodForMaskedLM', 'XmodForMultipleChoice', 'XmodForQuestionAnswering', 'XmodForSequenceClassification', 'XmodForTokenClassification', 'XmodModel', 'XmodPreTrainedModel', ] if TYPE_CHECKING: from .configuration_xmod import XMOD_PRETRAINED_CONFIG_ARCHIVE_MAP, XmodConfig, XmodOnnxConfig try: if not is_torch_available(): raise OptionalDependencyNotAvailable() except OptionalDependencyNotAvailable: pass else: from .modeling_xmod import ( XMOD_PRETRAINED_MODEL_ARCHIVE_LIST, XmodForCausalLM, XmodForMaskedLM, XmodForMultipleChoice, XmodForQuestionAnswering, XmodForSequenceClassification, XmodForTokenClassification, XmodModel, XmodPreTrainedModel, ) else: import sys lowerCAmelCase : Union[str, Any] =_LazyModule(__name__, globals()['__file__'], _import_structure, module_spec=__spec__)
705
import os
import string
import sys

# Fix: the obfuscated source assigned these values to throwaway names while
# the functions below read ARROW_KEY_FLAG / KEYMAP / WIN_KEYMAP /
# WIN_CH_BUFFER, which raised NameError at runtime.
ARROW_KEY_FLAG = 1 << 8  # high bit distinguishing arrow keys from plain chars

KEYMAP = {
    'tab': ord('\t'),
    'newline': ord('\r'),
    'esc': 27,
    'up': 65 + ARROW_KEY_FLAG,
    'down': 66 + ARROW_KEY_FLAG,
    'right': 67 + ARROW_KEY_FLAG,
    'left': 68 + ARROW_KEY_FLAG,
    'mod_int': 91,
    'undefined': sys.maxsize,
    'interrupt': 3,
    'insert': 50,
    'delete': 51,
    'pg_up': 53,
    'pg_down': 54,
}

# `get_character` checks the arrow range [arrow_begin, arrow_end].
KEYMAP['arrow_begin'] = KEYMAP['up']
KEYMAP['arrow_end'] = KEYMAP['left']

if sys.platform == "win32":
    WIN_CH_BUFFER = []
    WIN_KEYMAP = {
        b'\xe0H': KEYMAP['up'] - ARROW_KEY_FLAG,
        b'\x00H': KEYMAP['up'] - ARROW_KEY_FLAG,
        b'\xe0P': KEYMAP['down'] - ARROW_KEY_FLAG,
        b'\x00P': KEYMAP['down'] - ARROW_KEY_FLAG,
        b'\xe0M': KEYMAP['right'] - ARROW_KEY_FLAG,
        b'\x00M': KEYMAP['right'] - ARROW_KEY_FLAG,
        b'\xe0K': KEYMAP['left'] - ARROW_KEY_FLAG,
        b'\x00K': KEYMAP['left'] - ARROW_KEY_FLAG,
    }

# digit keys map to their own character codes
for i in range(10):
    KEYMAP[str(i)] = ord(str(i))


def get_raw_chars():
    """Read one raw keypress from the terminal (Windows or POSIX).

    Windows: uses msvcrt, translating two-byte extended codes through
    WIN_KEYMAP and buffering the escape sequence in WIN_CH_BUFFER.
    POSIX: puts stdin into raw mode, reads one character, then restores the
    terminal settings.
    """
    if os.name == "nt":
        import msvcrt

        encoding = """mbcs"""
        # Flush the keyboard buffer
        while msvcrt.kbhit():
            msvcrt.getch()
        if len(WIN_CH_BUFFER) == 0:
            # Read the keystroke
            ch = msvcrt.getch()
            # If it is a prefix char, get second part
            if ch in (b"\x00", b"\xe0"):
                cha = ch + msvcrt.getch()
                # Translate actual Win chars to bullet char types
                try:
                    chx = chr(WIN_KEYMAP[cha])
                    WIN_CH_BUFFER.append(chr(KEYMAP["""mod_int"""]))
                    WIN_CH_BUFFER.append(chx)
                    if ord(chx) in (
                        KEYMAP["insert"] - 1 << 9,
                        KEYMAP["delete"] - 1 << 9,
                        KEYMAP["pg_up"] - 1 << 9,
                        KEYMAP["pg_down"] - 1 << 9,
                    ):
                        WIN_CH_BUFFER.append(chr(126))
                    ch = chr(KEYMAP["""esc"""])
                except KeyError:
                    ch = cha[1]
            else:
                ch = ch.decode(encoding)
        else:
            ch = WIN_CH_BUFFER.pop(0)
    elif os.name == "posix":
        import termios
        import tty

        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            tty.setraw(fd)
            ch = sys.stdin.read(1)
        finally:
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
    return ch


def get_character():
    """Read a keypress and decode escape sequences into KEYMAP codes.

    Returns the raw character for printable/interrupt/newline input, an
    arrow-flagged character for arrow keys, and KEYMAP["undefined"] for
    anything unrecognised.
    """
    char = get_raw_chars()
    if ord(char) in [KEYMAP["interrupt"], KEYMAP["newline"]]:
        return char
    elif ord(char) == KEYMAP["esc"]:
        combo = get_raw_chars()
        if ord(combo) == KEYMAP["mod_int"]:
            key = get_raw_chars()
            if (
                ord(key) >= KEYMAP["arrow_begin"] - ARROW_KEY_FLAG
                and ord(key) <= KEYMAP["arrow_end"] - ARROW_KEY_FLAG
            ):
                return chr(ord(key) + ARROW_KEY_FLAG)
            else:
                return KEYMAP["undefined"]
        else:
            return get_raw_chars()
    else:
        if char in string.printable:
            return char
        else:
            return KEYMAP["undefined"]
693
0
# This code is adapted from OpenAI's release
# https://github.com/openai/human-eval/blob/master/human_eval/execution.py
#
# NOTE(review): obfuscation destroyed the local/attribute assignment targets
# throughout this module (e.g. `manager = ...`, `p = ...`, and — critically —
# the `os.kill = None`-style lockdown targets in the reliability guard), and
# collapsed distinct function/class names onto `_UpperCAmelCase` / `_a`.
# The code is left byte-identical because reconstructing the exact targets of
# a security sandbox from memory would be unsafe; restore them from the
# upstream file before use.
import contextlib
import faulthandler
import io
import multiprocessing
import os
import platform
import signal
import tempfile


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
    '''Run one candidate program in a subprocess with a timeout and report
    whether it passed; upstream name: check_correctness(check_program,
    timeout, task_id, completion_id).  The bindings for the manager, the
    shared result list and the worker process were lost in obfuscation.'''
    lowerCAmelCase : str = multiprocessing.Manager()
    lowerCAmelCase : Union[str, Any] = manager.list()
    # worker runs the untrusted program; killed if it outlives timeout + 1s
    lowerCAmelCase : Union[str, Any] = multiprocessing.Process(target=lowerCAmelCase__ , args=(check_program, result, timeout) )
    p.start()
    p.join(timeout=timeout + 1 )
    if p.is_alive():
        p.kill()
    if not result:
        result.append("""timed out""" )
    return {
        "task_id": task_id,
        "passed": result[0] == "passed",
        "result": result[0],
        "completion_id": completion_id,
    }


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
    '''Subprocess body: exec the untrusted program inside a temp dir with I/O
    swallowed, a time limit, and destructive syscalls disabled; append
    "passed"/"timed out"/"failed: ..." to the shared result list.'''
    with create_tempdir():
        # These system calls are needed when cleaning up tempdir.
        import os
        import shutil

        # saved references so cleanup still works after reliability_guard()
        # nulls the module-level functions (targets lost in obfuscation)
        lowerCAmelCase : Tuple = shutil.rmtree
        lowerCAmelCase : Tuple = os.rmdir
        lowerCAmelCase : Union[str, Any] = os.chdir
        # Disable functionalities that can make destructive changes to the test.
        reliability_guard()
        # Run program.
        try:
            lowerCAmelCase : Any = {}
            with swallow_io():
                with time_limit(lowerCAmelCase__ ):
                    exec(lowerCAmelCase__ , lowerCAmelCase__ )
            result.append("""passed""" )
        except TimeoutException:
            result.append("""timed out""" )
        except BaseException as e:
            result.append(F"""failed: {e}""" )
        # Needed for cleaning up.
        lowerCAmelCase : Optional[Any] = rmtree
        lowerCAmelCase : int = rmdir
        lowerCAmelCase : Union[str, Any] = chdir


@contextlib.contextmanager
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    '''Context manager raising TimeoutException via SIGALRM after the given
    number of seconds (upstream: time_limit).'''
    def signal_handler(SCREAMING_SNAKE_CASE__ , SCREAMING_SNAKE_CASE__ ):
        raise TimeoutException("""Timed out!""" )
    signal.setitimer(signal.ITIMER_REAL , lowerCAmelCase__ )
    signal.signal(signal.SIGALRM , lowerCAmelCase__ )
    try:
        yield
    finally:
        # always cancel the timer so it cannot fire after the block exits
        signal.setitimer(signal.ITIMER_REAL , 0 )


@contextlib.contextmanager
def _UpperCAmelCase ( ):
    '''Context manager silencing stdout/stderr and blocking stdin
    (upstream: swallow_io).'''
    lowerCAmelCase : Any = WriteOnlyStringIO()
    with contextlib.redirect_stdout(lowerCAmelCase__ ):
        with contextlib.redirect_stderr(lowerCAmelCase__ ):
            with redirect_stdin(lowerCAmelCase__ ):
                yield


@contextlib.contextmanager
def _UpperCAmelCase ( ):
    '''Context manager yielding a temporary working directory
    (upstream: create_tempdir).'''
    with tempfile.TemporaryDirectory() as dirname:
        with chdir(lowerCAmelCase__ ):
            yield dirname


class _a ( __a ):
    # upstream: TimeoutException(Exception); the base-class name was
    # destroyed by obfuscation (`__a` is undefined here)
    pass


class _a ( io.StringIO ):
    # upstream: WriteOnlyStringIO — StringIO that raises on every read so the
    # sandboxed program cannot consume the captured stream
    def _snake_case ( self , *lowercase_ , **lowercase_ ) -> Tuple:
        raise OSError

    def _snake_case ( self , *lowercase_ , **lowercase_ ) -> Union[str, Any]:
        raise OSError

    def _snake_case ( self , *lowercase_ , **lowercase_ ) -> Tuple:
        raise OSError

    def _snake_case ( self , *lowercase_ , **lowercase_ ) -> Any:
        # upstream: readable() -> False
        return False


class _a ( contextlib._RedirectStream ):  # type: ignore
    # upstream: redirect_stdin — _RedirectStream specialised for sys.stdin
    _UpperCamelCase: List[Any] = '''stdin'''


@contextlib.contextmanager
def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__ ):
    '''Context manager that chdirs into `root` and always restores the
    previous working directory (upstream: chdir).'''
    if root == ".":
        yield
        return
    lowerCAmelCase : str = os.getcwd()
    os.chdir(lowerCAmelCase__ )
    try:
        yield
    except BaseException as exc:
        raise exc
    finally:
        os.chdir(lowerCAmelCase__ )


def _UpperCAmelCase ( SCREAMING_SNAKE_CASE__=None ):
    '''Disable destructive capabilities before exec'ing untrusted code
    (upstream: reliability_guard): optional address-space limits, then
    null out dangerous builtins/os/shutil/subprocess/sys hooks.
    WARNING: every `lowerCAmelCase ... = None` below originally assigned to a
    specific attribute (e.g. `builtins.exit`, `os.kill`, `shutil.rmtree`,
    `subprocess.Popen`); those targets were lost and as written this guard
    protects NOTHING — restore from upstream.'''
    if maximum_memory_bytes is not None:
        import resource

        resource.setrlimit(resource.RLIMIT_AS , (maximum_memory_bytes, maximum_memory_bytes) )
        resource.setrlimit(resource.RLIMIT_DATA , (maximum_memory_bytes, maximum_memory_bytes) )
        # RLIMIT_STACK is not settable this way on macOS
        if not platform.uname().system == "Darwin":
            resource.setrlimit(resource.RLIMIT_STACK , (maximum_memory_bytes, maximum_memory_bytes) )
    faulthandler.disable()
    import builtins

    lowerCAmelCase : Union[str, Any] = None
    lowerCAmelCase : List[str] = None
    import os

    lowerCAmelCase : Dict = """1"""
    lowerCAmelCase : int = None
    lowerCAmelCase : List[Any] = None
    lowerCAmelCase : List[Any] = None
    lowerCAmelCase : Dict = None
    lowerCAmelCase : Optional[int] = None
    lowerCAmelCase : Union[str, Any] = None
    lowerCAmelCase : Any = None
    lowerCAmelCase : Any = None
    lowerCAmelCase : List[Any] = None
    lowerCAmelCase : List[Any] = None
    lowerCAmelCase : List[Any] = None
    lowerCAmelCase : int = None
    lowerCAmelCase : List[str] = None
    lowerCAmelCase : str = None
    lowerCAmelCase : Any = None
    lowerCAmelCase : Tuple = None
    lowerCAmelCase : List[str] = None
    lowerCAmelCase : int = None
    lowerCAmelCase : Tuple = None
    lowerCAmelCase : Tuple = None
    lowerCAmelCase : List[str] = None
    lowerCAmelCase : Any = None
    lowerCAmelCase : int = None
    lowerCAmelCase : Optional[int] = None
    lowerCAmelCase : List[Any] = None
    lowerCAmelCase : int = None
    lowerCAmelCase : List[str] = None
    import shutil

    lowerCAmelCase : Any = None
    lowerCAmelCase : Optional[int] = None
    lowerCAmelCase : Union[str, Any] = None
    import subprocess

    lowerCAmelCase : List[str] = None  # type: ignore
    lowerCAmelCase : str = None
    import sys

    lowerCAmelCase : Optional[Any] = None
    lowerCAmelCase : Dict = None
    lowerCAmelCase : str = None
    lowerCAmelCase : Optional[int] = None
    lowerCAmelCase : Dict = None
706
# Imports import numpy as np class _a : def __init__( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> List[Any]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) def _snake_case ( self , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Union[str, Any]: if red is not None: lowerCAmelCase : str = red if green is not None: lowerCAmelCase : Optional[int] = green if blue is not None: lowerCAmelCase : Optional[int] = blue if red_edge is not None: lowerCAmelCase : Tuple = red_edge if nir is not None: lowerCAmelCase : Union[str, Any] = nir return True def _snake_case ( self , lowercase_="" , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None , lowercase_=None ) -> Optional[int]: self.set_matricies(red=lowercase_ , green=lowercase_ , blue=lowercase_ , red_edge=lowercase_ , nir=lowercase_ ) lowerCAmelCase : int = { """ARVI2""": self.arvaa, """CCCI""": self.ccci, """CVI""": self.cvi, """GLI""": self.gli, """NDVI""": self.ndvi, """BNDVI""": self.bndvi, """redEdgeNDVI""": self.red_edge_ndvi, """GNDVI""": self.gndvi, """GBNDVI""": self.gbndvi, """GRNDVI""": self.grndvi, """RBNDVI""": self.rbndvi, """PNDVI""": self.pndvi, """ATSAVI""": self.atsavi, """BWDRVI""": self.bwdrvi, """CIgreen""": self.ci_green, """CIrededge""": self.ci_rededge, """CI""": self.ci, """CTVI""": self.ctvi, """GDVI""": self.gdvi, """EVI""": self.evi, """GEMI""": self.gemi, """GOSAVI""": self.gosavi, """GSAVI""": self.gsavi, """Hue""": self.hue, """IVI""": self.ivi, """IPVI""": self.ipvi, """I""": self.i, """RVI""": self.rvi, """MRVI""": self.mrvi, """MSAVI""": self.m_savi, """NormG""": self.norm_g, """NormNIR""": self.norm_nir, """NormR""": self.norm_r, """NGRDI""": self.ngrdi, """RI""": self.ri, """S""": self.s, """IF""": self._if, """DVI""": self.dvi, """TVI""": self.tvi, """NDRE""": self.ndre, } try: return funcs[index]() except 
KeyError: print("""Index not in the list!""" ) return False def _snake_case ( self ) -> Dict: return -0.1_8 + (1.1_7 * ((self.nir - self.red) / (self.nir + self.red))) def _snake_case ( self ) -> Optional[Any]: return ((self.nir - self.redEdge) / (self.nir + self.redEdge)) / ( (self.nir - self.red) / (self.nir + self.red) ) def _snake_case ( self ) -> List[str]: return self.nir * (self.red / (self.green**2)) def _snake_case ( self ) -> Tuple: return (2 * self.green - self.red - self.blue) / ( 2 * self.green + self.red + self.blue ) def _snake_case ( self ) -> Optional[int]: return (self.nir - self.red) / (self.nir + self.red) def _snake_case ( self ) -> List[str]: return (self.nir - self.blue) / (self.nir + self.blue) def _snake_case ( self ) -> int: return (self.redEdge - self.red) / (self.redEdge + self.red) def _snake_case ( self ) -> Optional[Any]: return (self.nir - self.green) / (self.nir + self.green) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.blue)) / ( self.nir + (self.green + self.blue) ) def _snake_case ( self ) -> Tuple: return (self.nir - (self.green + self.red)) / ( self.nir + (self.green + self.red) ) def _snake_case ( self ) -> int: return (self.nir - (self.blue + self.red)) / (self.nir + (self.blue + self.red)) def _snake_case ( self ) -> List[str]: return (self.nir - (self.green + self.red + self.blue)) / ( self.nir + (self.green + self.red + self.blue) ) def _snake_case ( self , lowercase_=0.0_8 , lowercase_=1.2_2 , lowercase_=0.0_3 ) -> int: return a * ( (self.nir - a * self.red - b) / (a * self.nir + self.red - a * b + x * (1 + a**2)) ) def _snake_case ( self ) -> Optional[Any]: return (0.1 * self.nir - self.blue) / (0.1 * self.nir + self.blue) def _snake_case ( self ) -> Any: return (self.nir / self.green) - 1 def _snake_case ( self ) -> List[Any]: return (self.nir / self.redEdge) - 1 def _snake_case ( self ) -> str: return (self.red - self.blue) / self.red def _snake_case ( self ) -> Optional[int]: lowerCAmelCase 
: Dict = self.ndvi() return ((ndvi + 0.5) / (abs(ndvi + 0.5 ))) * (abs(ndvi + 0.5 ) ** (1 / 2)) def _snake_case ( self ) -> Optional[Any]: return self.nir - self.green def _snake_case ( self ) -> int: return 2.5 * ( (self.nir - self.red) / (self.nir + 6 * self.red - 7.5 * self.blue + 1) ) def _snake_case ( self ) -> Optional[Any]: lowerCAmelCase : Tuple = (2 * (self.nir**2 - self.red**2) + 1.5 * self.nir + 0.5 * self.red) / ( self.nir + self.red + 0.5 ) return n * (1 - 0.2_5 * n) - (self.red - 0.1_2_5) / (1 - self.red) def _snake_case ( self , lowercase_=0.1_6 ) -> Optional[int]: return (self.nir - self.green) / (self.nir + self.green + y) def _snake_case ( self , lowercase_=0.5 ) -> List[str]: return ((self.nir - self.green) / (self.nir + self.green + n)) * (1 + n) def _snake_case ( self ) -> Any: return np.arctan( ((2 * self.red - self.green - self.blue) / 3_0.5) * (self.green - self.blue) ) def _snake_case ( self , lowercase_=None , lowercase_=None ) -> List[Any]: return (self.nir - b) / (a * self.red) def _snake_case ( self ) -> Any: return (self.nir / ((self.nir + self.red) / 2)) * (self.ndvi() + 1) def _snake_case ( self ) -> str: return (self.red + self.green + self.blue) / 3_0.5 def _snake_case ( self ) -> Union[str, Any]: return self.nir / self.red def _snake_case ( self ) -> Tuple: return (self.rvi() - 1) / (self.rvi() + 1) def _snake_case ( self ) -> Dict: return ( (2 * self.nir + 1) - ((2 * self.nir + 1) ** 2 - 8 * (self.nir - self.red)) ** (1 / 2) ) / 2 def _snake_case ( self ) -> List[Any]: return self.green / (self.nir + self.red + self.green) def _snake_case ( self ) -> int: return self.nir / (self.nir + self.red + self.green) def _snake_case ( self ) -> Dict: return self.red / (self.nir + self.red + self.green) def _snake_case ( self ) -> List[Any]: return (self.green - self.red) / (self.green + self.red) def _snake_case ( self ) -> Optional[int]: return (self.red - self.green) / (self.red + self.green) def _snake_case ( self ) -> Tuple: 
lowerCAmelCase : Any = np.max([np.max(self.red ), np.max(self.green ), np.max(self.blue )] ) lowerCAmelCase : Dict = np.min([np.min(self.red ), np.min(self.green ), np.min(self.blue )] ) return (max_value - min_value) / max_value def _snake_case ( self ) -> int: return (2 * self.red - self.green - self.blue) / (self.green - self.blue) def _snake_case ( self ) -> List[str]: return self.nir / self.red def _snake_case ( self ) -> int: return (self.ndvi() + 0.5) ** (1 / 2) def _snake_case ( self ) -> str: return (self.nir - self.redEdge) / (self.nir + self.redEdge)
693
0