<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<oembed>
  <author_name>sosodemonai</author_name>
  <author_url>https://blog.hatena.ne.jp/sosodemonai/</author_url>
  <blog_title>Neunomizu's Diary</blog_title>
  <blog_url>https://propyon.hateblo.jp/</blog_url>
  <categories>
    <anon>Papers</anon>
  </categories>
  <description>tags: Papers Title BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding What can be guessed from the title: perhaps a paper on a new deep-learning-based natural language processing method that applies some kind of processing to the training data? Abstract An abstract of the abstract: BERT is an acronym for Bidirectional Encoder Representations from Transformers. BERT conditions on both left and right context, &quot;deep bidirectional represe…</description>
  <height>190</height>
  <html>&lt;iframe src=&quot;https://hatenablog-parts.com/embed?url=https%3A%2F%2Fpropyon.hateblo.jp%2Fentry%2F2019%2F08%2F05%2F184102&quot; title=&quot;I read the paper &amp;quot;BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding&amp;quot; - Neunomizu's Diary&quot; class=&quot;embed-card embed-blogcard&quot; scrolling=&quot;no&quot; frameborder=&quot;0&quot; style=&quot;display: block; width: 100%; height: 190px; max-width: 500px; margin: 10px 0px;&quot;&gt;&lt;/iframe&gt;</html>
  <image_url></image_url>
  <provider_name>Hatena Blog</provider_name>
  <provider_url>https://hatena.blog</provider_url>
  <published>2019-08-05 18:41:02</published>
  <title>I read the paper &quot;BERT: Pre-training of Deep Bidirectional Transformers for Language Understanding&quot;</title>
  <type>rich</type>
  <url>https://propyon.hateblo.jp/entry/2019/08/05/184102</url>
  <version>1.0</version>
  <width>100%</width>
</oembed>
