<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9" xmlns:xhtml="http://www.w3.org/1999/xhtml"><url>
    <loc>https://cloudier-papers.netlify.app/table</loc>
    <lastmod>2024-03-13T14:09:01.380Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/words</loc>
    <lastmod>2024-03-13T14:09:01.380Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/tools</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/computer-vision/dataset/beyond-neural-scaling-laws-beating-power-law-scaling-via-data-pruning</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/computer-vision/dataset/deep-learning-on-a-data-diet-finding-important-examples-early-in-training</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/computer-vision/dataset/semdedup-data-efficient-learning-at-web-scale-through-semantic-deduplication</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/dataset/d4-improving-llm-pretraining-via-document-de-duplication-and-diversification</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/dataset/deduplicaion/lsh-minhash</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/dataset/deduplicaion/memorization-without-overfitting-analyzing-the-training-dynamics-of-large-language-models</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/dataset/pruning/when-less-is-more-investigating-data-pruning-for-pretraining-llms-at-scale</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/dataset/quality/language-models-are-few-shot-learners</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/dataset/quality/the-pile-an-800gb-dataset-of-diverse-text-for-language-modeling</loc>
    <lastmod>2024-03-13T14:09:01.428Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/dataset/scaling-laws-for-neural-language-models</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/dataset/textbooks-are-all-you-need</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/embedding</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/inference</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/language-models-are-few-shot-learners</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/learning</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/tokenization</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/%eb%a9%94%eb%aa%a8</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%eb%85%bc%eb%ac%b8-%ec%a0%95%eb%a6%ac/language-model/%ed%83%80%ec%9e%84%eb%9d%bc%ec%9d%b8</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%ec%82%ac%ec%a0%84-%ed%95%99%ec%8a%b5%ec%9a%a9-%ed%85%8d%ec%8a%a4%ed%8a%b8-%eb%8d%b0%ec%9d%b4%ed%84%b0%ec%85%8b-%ed%8f%89%ea%b0%80</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%ec%9e%91%ec%97%85%ec%8b%a4/%eb%b2%88%ec%97%ad-%ec%a4%91/ai/deep-contextualized-word-representations</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%ec%9e%91%ec%97%85%ec%8b%a4/%eb%b2%88%ec%97%ad-%ec%a4%91/ai/explaining-how-transformers-use-context-to-build-predictions</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%ec%9e%91%ec%97%85%ec%8b%a4/%eb%b2%88%ec%97%ad-%ec%a4%91/ai/rethinking-the-inception-architecture-for-computer-vision</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url><url>
    <loc>https://cloudier-papers.netlify.app/%ec%9e%91%ec%97%85%ec%8b%a4/%eb%b2%88%ec%97%ad-%ec%a4%91/ai/%ec%9e%91%ec%84%b1-%ec%9a%94%eb%a0%b9</loc>
    <lastmod>2024-03-13T14:09:01.432Z</lastmod>
  </url></urlset>