{"id":924,"date":"2026-04-06T23:16:08","date_gmt":"2026-04-06T23:16:08","guid":{"rendered":"https:\/\/pythonia.fr\/?page_id=924"},"modified":"2026-04-06T23:25:49","modified_gmt":"2026-04-06T23:25:49","slug":"nlp","status":"publish","type":"page","link":"https:\/\/pythonia.fr\/index.php\/nlp\/","title":{"rendered":"NLP"},"content":{"rendered":"\t\t<div data-elementor-type=\"wp-page\" data-elementor-id=\"924\" class=\"elementor elementor-924\">\n\t\t\t\t<div class=\"elementor-element elementor-element-ea6563c e-grid e-con-boxed e-con e-parent\" data-id=\"ea6563c\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;gradient&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t\t\t<div class=\"elementor-element elementor-element-8b2cf1c elementor-widget elementor-widget-heading\" data-id=\"8b2cf1c\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t\t<h2 class=\"elementor-heading-title elementor-size-default\">Natural Language Processing<\/h2>\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-ba540fa e-flex e-con-boxed e-con e-parent\" data-id=\"ba540fa\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;gradient&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t\t\t<div class=\"elementor-element elementor-element-e22e52a elementor-widget elementor-widget-heading\" data-id=\"e22e52a\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t\t<h2 class=\"elementor-heading-title elementor-size-default\"><div style=\"gap:40px;flex-wrap:wrap;font-family:sans-serif\">\n  <!-- Colonne gauche : Objectifs -->\n  <div style=\"flex:2;min-width:300px\">\n    <h3 style=\"color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px;font-size:16px\">\n      Objectifs :\n    <\/h3>\n    <ul 
style=\"font-size:14px;color:#FFFFFF;line-height:2;margin-left:20px\">\n      <li>Ma\u00eetriser les concepts d'alg\u00e8bre lin\u00e9aire essentiels au NLP (vecteurs, matrices, similarit\u00e9 cosinus)<\/li>\n      <li>Construire des repr\u00e9sentations vectorielles de mots (TF-IDF, Word2Vec, GloVe, FastText)<\/li>\n      <li>Concevoir et entra\u00eener des r\u00e9seaux de neurones pour le NLP avec PyTorch (RNN, LSTM, GRU)<\/li>\n      <li>Construire des mod\u00e8les s\u00e9quentiels avec TensorFlow\/Keras et comprendre l'architecture Transformer<\/li>\n      <li>Fine-tuner des mod\u00e8les pr\u00e9-entra\u00een\u00e9s (BERT, CamemBERT) avec Hugging Face<\/li>\n    <\/ul>\n  <\/div>\n  <!-- Colonne droite : Tarif + liens -->\n  <div style=\"flex:1;min-width:200px;background:#0B1D33;padding:25px;border-radius:12px;height:fit-content\">\n    <div style=\"padding:12px 0;border-bottom:1px solid #1a3a5c\">\n      <span style=\"color:#8899AA;font-size:12px;text-transform:uppercase;letter-spacing:0.5px\">Tarif inter \/ participant<\/span>\n      <div style=\"color:#FFFFFF;font-size:22px;font-weight:bold;margin-top:4px\">\n        2 500 \u20ac <span style=\"font-size:13px;font-weight:normal;color:#8899AA\">HT<\/span>\n      <\/div>\n    <\/div>\n    <a href=\"http:\/\/pythonia.fr\/wp-content\/uploads\/2026\/04\/Programme_NLP_Python_Pythonia.pdf\" target=\"_blank\" style=\"color:#FFFFFF;font-size:15px;text-decoration:none;padding:12px 0;border-bottom:1px solid #1a3a5c\">\n      \ud83d\udcc4 Programme (PDF)\n    <\/a>\n    <a href=\"#\" id=\"btn-dates\" style=\"color:#FFFFFF;font-size:15px;text-decoration:none;padding:12px 0;border-bottom:1px solid #1a3a5c\">\n      \ud83d\udcc5 Voir les dates\n    <\/a>\n    <button id=\"btn-contact\" style=\"width:100%;margin-top:20px;padding:15px 20px;background:#5DADE2;color:#FFFFFF;font-size:15px;font-weight:bold;border:none;border-radius:8px;cursor:pointer\">\n      \u2709\ufe0f Demande d'information\n    <\/button>\n  
<\/div>\n<\/div><\/h2>\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t<div class=\"elementor-element elementor-element-424e95b e-grid e-con-boxed e-con e-parent\" data-id=\"424e95b\" data-element_type=\"container\" data-e-type=\"container\" data-settings=\"{&quot;background_background&quot;:&quot;gradient&quot;}\">\n\t\t\t\t\t<div class=\"e-con-inner\">\n\t\t\t\t<div class=\"elementor-element elementor-element-62bb9a0 elementor-widget elementor-widget-heading\" data-id=\"62bb9a0\" data-element_type=\"widget\" data-e-type=\"widget\" data-widget_type=\"heading.default\">\n\t\t\t\t\t<h2 class=\"elementor-heading-title elementor-size-default\"><div style=\"gap:40px;flex-wrap:wrap\">\n\n  <!-- Colonne gauche : Contenu -->\n  <div style=\"flex:2;min-width:300px\">\n\n    <h3 style=\"font-size:24px;color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px\">Public vis\u00e9<\/h3>\n    <ul style=\"font-size:14px;color:#FFFFFF;line-height:1.8;margin-left:20px\">\n      <li>D\u00e9veloppeurs Python souhaitant se sp\u00e9cialiser dans le traitement du langage<\/li>\n      <li>Data scientists et ing\u00e9nieurs machine learning<\/li>\n      <li>Chercheurs et \u00e9tudiants en IA souhaitant ma\u00eetriser les techniques modernes du NLP<\/li>\n      <li>Professionnels souhaitant int\u00e9grer le NLP dans leurs produits ou pipelines de donn\u00e9es<\/li>\n    <\/ul>\n\n    <h3 style=\"font-size:24px;color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px\">Objectifs p\u00e9dagogiques<\/h3>\n    <ul style=\"font-size:14px;color:#FFFFFF;line-height:1.8;margin-left:20px\">\n      <li>Ma\u00eetriser les concepts d'alg\u00e8bre lin\u00e9aire essentiels au NLP (vecteurs, matrices, produits scalaires, projections)<\/li>\n      <li>Mettre en \u0153uvre les techniques classiques de pr\u00e9traitement du texte (tokenization, stemming, lemmatisation)<\/li>\n      <li>Construire des repr\u00e9sentations vectorielles de mots (TF-IDF, Word2Vec, GloVe, embeddings 
contextuels)<\/li>\n      <li>D\u00e9velopper des mod\u00e8les de classification de texte avec scikit-learn<\/li>\n      <li>Concevoir et entra\u00eener des r\u00e9seaux de neurones pour le NLP avec PyTorch<\/li>\n      <li>Construire des mod\u00e8les s\u00e9quentiels (RNN, LSTM, GRU) avec TensorFlow\/Keras<\/li>\n      <li>Comprendre l'architecture des Transformers et utiliser Hugging Face<\/li>\n      <li>D\u00e9velopper une application NLP compl\u00e8te (analyse de sentiment, NER, r\u00e9sum\u00e9 automatique)<\/li>\n    <\/ul>\n\n    <h3 style=\"font-size:24px;color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px\">Pr\u00e9requis<\/h3>\n    <ul style=\"font-size:14px;color:#FFFFFF;line-height:1.8;margin-left:20px\">\n      <li>Ma\u00eetrise de Python niveau interm\u00e9diaire : POO, modules, manipulation de fichiers<\/li>\n      <li>Notions de base en math\u00e9matiques : op\u00e9rations vectorielles, fonctions, d\u00e9riv\u00e9es (niveau lyc\u00e9e scientifique)<\/li>\n      <li>Connaissance de pandas et numpy appr\u00e9ci\u00e9e mais non obligatoire<\/li>\n      <li>Disposer d'un ordinateur avec Python 3.x et acc\u00e8s internet<\/li>\n    <\/ul>\n\n    <h3 style=\"font-size:24px;color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px\">M\u00e9thodes p\u00e9dagogiques<\/h3>\n    <ul style=\"font-size:14px;color:#FFFFFF;line-height:1.8;margin-left:20px\">\n      <li>Alternance de th\u00e9orie (35%) et de pratique (65%)<\/li>\n      <li>D\u00e9monstrations math\u00e9matiques visuelles et exemples concrets<\/li>\n      <li>TP individuels et collaboratifs avec NumPy, scikit-learn, PyTorch, TensorFlow et Hugging Face<\/li>\n      <li>Comparaison des frameworks : chaque concept est illustr\u00e9 en PyTorch et TensorFlow lorsque pertinent<\/li>\n      <li>Supports de cours et notebooks Jupyter remis aux stagiaires (acc\u00e8s p\u00e9renne)<\/li>\n    <\/ul>\n\n    <h1 
style=\"font-size:24px;color:#FFFFFF;font-weight:bold;margin-top:40px;margin-bottom:20px\">Programme d\u00e9taill\u00e9<\/h1>\n\n    <h3 style=\"font-size:16px;color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px\">JOUR 1 \u2014 Fondations math\u00e9matiques et premiers pas en NLP<\/h3>\n    <ul style=\"font-size:14px;color:#FFFFFF;line-height:1.8;margin-left:20px\">\n      <li>Vecteurs : d\u00e9finition, dimension, repr\u00e9sentation d'un mot ou d'un document<\/li>\n      <li>Op\u00e9rations vectorielles : addition, multiplication scalaire, norme (L1, L2)<\/li>\n      <li>Produit scalaire et similarit\u00e9 cosinus : mesurer la proximit\u00e9 s\u00e9mantique entre textes<\/li>\n      <li>Matrices : d\u00e9finition, transposition, produit matriciel, transformations lin\u00e9aires<\/li>\n      <li>D\u00e9composition en valeurs singuli\u00e8res (SVD) : principe et application au NLP (LSA)<\/li>\n      <li>Calcul matriciel avec NumPy : ndarray, broadcasting, op\u00e9rations vectoris\u00e9es<\/li>\n      <li>Notions de probabilit\u00e9s utiles : distributions, probabilit\u00e9 conditionnelle, th\u00e9or\u00e8me de Bayes<\/li>\n      <li>Panorama du NLP : enjeux, applications, \u00e9volution historique<\/li>\n      <li>Tokenization, normalisation, stemming et lemmatisation avec NLTK et spaCy<\/li>\n      <li>POS tagging et reconnaissance d'entit\u00e9s nomm\u00e9es (NER)<\/li>\n      <li>Repr\u00e9sentations Bag-of-Words et TF-IDF<\/li>\n    <\/ul>\n    <p style=\"font-size:14px;color:#FFFFFF;margin-left:20px;margin-top:15px\"><strong>\u25a0 Travaux pratiques :<\/strong><br>\n    \u2192 TP1 : Calcul de similarit\u00e9 cosinus entre documents avec NumPy<br>\n    \u2192 TP2 : Pipeline complet de pr\u00e9traitement de texte avec spaCy (tokenization, lemmatisation, NER)<br>\n    \u2192 TP3 : Construction d'une matrice TF-IDF et recherche des documents les plus similaires<\/p>\n\n    <h3 
style=\"font-size:16px;color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px\">JOUR 2 \u2014 Embeddings et machine learning classique appliqu\u00e9 au NLP<\/h3>\n    <ul style=\"font-size:14px;color:#FFFFFF;line-height:1.8;margin-left:20px\">\n      <li>Limites des repr\u00e9sentations Bag-of-Words : sparsit\u00e9, absence de s\u00e9mantique<\/li>\n      <li>Hypoth\u00e8se distributionnelle : \u00ab un mot se d\u00e9finit par son contexte \u00bb<\/li>\n      <li>Word2Vec : architectures CBOW et Skip-Gram, intuition math\u00e9matique<\/li>\n      <li>GloVe : factorisation de matrice de co-occurrence<\/li>\n      <li>FastText : embeddings de sous-mots, gestion des mots inconnus<\/li>\n      <li>Visualisation des embeddings : r\u00e9duction de dimension avec PCA et t-SNE<\/li>\n      <li>Utilisation de mod\u00e8les pr\u00e9-entra\u00een\u00e9s (fran\u00e7ais et anglais)<\/li>\n      <li>Pipeline de machine learning pour le NLP : pr\u00e9traitement, vectorisation, mod\u00e8le, \u00e9valuation<\/li>\n      <li>Algorithmes classiques : r\u00e9gression logistique, SVM, Naive Bayes, Random Forest<\/li>\n      <li>M\u00e9triques d'\u00e9valuation : pr\u00e9cision, rappel, F1-score, matrice de confusion<\/li>\n      <li>Cross-validation et recherche d'hyperparam\u00e8tres avec GridSearchCV<\/li>\n      <li>Gestion des classes d\u00e9s\u00e9quilibr\u00e9es et interpr\u00e9tabilit\u00e9 des mod\u00e8les<\/li>\n    <\/ul>\n    <p style=\"font-size:14px;color:#FFFFFF;margin-left:20px;margin-top:15px\"><strong>\u25a0 Travaux pratiques :<\/strong><br>\n    \u2192 TP1 : Entra\u00eenement de Word2Vec sur un corpus fran\u00e7ais et exploration des analogies<br>\n    \u2192 TP2 : Classifieur de sentiment sur des avis clients avec scikit-learn (TF-IDF + r\u00e9gression logistique)<br>\n    \u2192 TP3 : D\u00e9tection automatique de spam avec optimisation d'hyperparam\u00e8tres<\/p>\n\n    <h3 
style=\"font-size:16px;color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px\">JOUR 3 \u2014 Deep learning pour le NLP avec PyTorch<\/h3>\n    <ul style=\"font-size:14px;color:#FFFFFF;line-height:1.8;margin-left:20px\">\n      <li>Pr\u00e9sentation de PyTorch : philosophie, \u00e9cosyst\u00e8me, comparaison avec TensorFlow<\/li>\n      <li>Tenseurs PyTorch : cr\u00e9ation, op\u00e9rations, broadcasting, transfert CPU\/GPU<\/li>\n      <li>Autograd : diff\u00e9rentiation automatique, calcul du gradient<\/li>\n      <li>Construction d'un r\u00e9seau de neurones : torch.nn.Module, couches Linear, fonctions d'activation<\/li>\n      <li>Boucle d'entra\u00eenement : forward, loss, backward, optimizer (SGD, Adam)<\/li>\n      <li>DataLoader et Dataset : chargement efficace des donn\u00e9es textuelles<\/li>\n      <li>Couche Embedding : repr\u00e9sentation dense des mots dans un r\u00e9seau de neurones<\/li>\n      <li>R\u00e9seaux r\u00e9currents (RNN) : intuition, \u00e9quations, limites (vanishing gradient)<\/li>\n      <li>LSTM et GRU : m\u00e9canismes de portes, m\u00e9moire \u00e0 long terme<\/li>\n      <li>RNN bidirectionnels : combiner contexte gauche et droite<\/li>\n      <li>R\u00e9gularisation : dropout, batch normalization, early stopping<\/li>\n    <\/ul>\n    <p style=\"font-size:14px;color:#FFFFFF;margin-left:20px;margin-top:15px\"><strong>\u25a0 Travaux pratiques :<\/strong><br>\n    \u2192 TP1 : Impl\u00e9mentation d'un classifieur de texte avec un r\u00e9seau feed-forward en PyTorch<br>\n    \u2192 TP2 : Construction d'un LSTM pour la classification d'avis clients<br>\n    \u2192 TP3 : Comparaison des performances entre r\u00e9gression logistique, LSTM et GRU sur un m\u00eame dataset<\/p>\n\n    <h3 style=\"font-size:16px;color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px\">JOUR 4 \u2014 TensorFlow, Transformers et projet final<\/h3>\n    <ul style=\"font-size:14px;color:#FFFFFF;line-height:1.8;margin-left:20px\">\n      
<li>Pr\u00e9sentation de TensorFlow et Keras : philosophie, API s\u00e9quentielle et fonctionnelle<\/li>\n      <li>Construction d'un mod\u00e8le Keras : Sequential, couches, compilation, entra\u00eenement<\/li>\n      <li>Tenseurs TensorFlow : tf.Tensor, op\u00e9rations, eager execution<\/li>\n      <li>Pipeline tf.data : chargement et transformation efficaces de corpus textuels<\/li>\n      <li>Impl\u00e9mentation d'un LSTM en Keras et comparaison avec PyTorch<\/li>\n      <li>M\u00e9canisme d'attention : intuition, calcul, int\u00e9r\u00eat<\/li>\n      <li>Architecture Transformer : self-attention, multi-head attention, encodage positionnel<\/li>\n      <li>BERT, GPT et leurs variantes : pr\u00e9sentation et cas d'usage<\/li>\n      <li>\u00c9cosyst\u00e8me Hugging Face : Transformers, Datasets, Tokenizers, Hub<\/li>\n      <li>Pipeline Hugging Face : classification, NER, question-answering, r\u00e9sum\u00e9<\/li>\n      <li>Fine-tuning d'un mod\u00e8le pr\u00e9-entra\u00een\u00e9 sur un dataset m\u00e9tier<\/li>\n      <li>Bonnes pratiques : gestion de la m\u00e9moire, GPU, taille de batch<\/li>\n    <\/ul>\n    <p style=\"font-size:14px;color:#FFFFFF;margin-left:20px;margin-top:15px\"><strong>\u25a0 Travaux pratiques :<\/strong><br>\n    \u2192 TP1 : Impl\u00e9mentation et entra\u00eenement d'un LSTM en Keras\/TensorFlow sur un corpus textuel<br>\n    \u2192 TP2 : Fine-tuning de CamemBERT pour la classification de sentiment sur des avis en fran\u00e7ais<br>\n    \u2192 TP3 (Projet final) : Application NLP compl\u00e8te au choix \u2014 analyse de sentiment, NER m\u00e9tier, ou r\u00e9sum\u00e9 automatique<\/p>\n\n    <h3 style=\"font-size:16px;color:#FFFFFF;font-weight:bold;margin-top:25px;margin-bottom:10px\">\u00c9valuation<\/h3>\n    <ul style=\"font-size:14px;color:#FFFFFF;line-height:1.8;margin-left:20px\">\n      <li>Test de positionnement en d\u00e9but de formation<\/li>\n      <li>\u00c9valuations formatives : exercices pratiques corrig\u00e9s, revue 
de code, d\u00e9bogage en groupe, QCM interm\u00e9diaires<\/li>\n      <li>QCM mi-parcours de 20 questions (Jour 2)<\/li>\n      <li>QCM final de 30 questions (Jour 4) \u2014 crit\u00e8re de r\u00e9ussite : 60%<\/li>\n      <li>Soutenance du mini-projet final<\/li>\n      <li>Attestation de fin de formation d\u00e9livr\u00e9e<\/li>\n    <\/ul>\n\n  <\/div>\n<\/div><\/h2>\t\t\t\t<\/div>\n\t\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t\t\t<\/div>\n\t\t","protected":false},"excerpt":{"rendered":"<p>Natural Language Processing Objectifs : Ma\u00eetriser les concepts d&#8217;alg\u00e8bre lin\u00e9aire essentiels au NLP (vecteurs, matrices, similarit\u00e9 cosinus) Construire des repr\u00e9sentations [&hellip;]<\/p>\n","protected":false},"author":1,"featured_media":0,"parent":0,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"","meta":{"site-sidebar-layout":"no-sidebar","site-content-layout":"","ast-site-content-layout":"full-width-container","site-content-style":"default","site-sidebar-style":"default","ast-global-header-display":"","ast-banner-title-visibility":"","ast-main-header-display":"","ast-hfb-above-header-display":"","ast-hfb-below-header-display":"","ast-hfb-mobile-header-display":"","site-post-title":"disabled","ast-breadcrumbs-content":"","ast-featured-img":"disabled","footer-sml-layout":"","ast-disable-related-posts":"","theme-transparent-header-meta":"","adv-header-id-meta":"","stick-header-meta":"","header-above-stick-meta":"","header-main-stick-meta":"","header-below-stick-meta":"","astra-migrate-meta-layouts":"default","ast-page-background-enabled":"default","ast-page-background-meta":{"desktop":{"background-color":"var(--ast-global-color-5)","background-image":"","background-repeat":"repeat","background-position":"center 
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"ast-content-background-meta":{"desktop":{"background-color":"var(--ast-global-color-4)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"tablet":{"background-color":"var(--ast-global-color-4)","background-image":"","background-repeat":"repeat","background-position":"center center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""},"mobile":{"background-color":"var(--ast-global-color-4)","background-image":"","background-repeat":"repeat","background-position":"center 
center","background-size":"auto","background-attachment":"scroll","background-type":"","background-media":"","overlay-type":"","overlay-color":"","overlay-opacity":"","overlay-gradient":""}},"footnotes":""},"class_list":["post-924","page","type-page","status-publish","hentry"],"_links":{"self":[{"href":"https:\/\/pythonia.fr\/index.php\/wp-json\/wp\/v2\/pages\/924","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/pythonia.fr\/index.php\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/pythonia.fr\/index.php\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/pythonia.fr\/index.php\/wp-json\/wp\/v2\/users\/1"}],"replies":[{"embeddable":true,"href":"https:\/\/pythonia.fr\/index.php\/wp-json\/wp\/v2\/comments?post=924"}],"version-history":[{"count":7,"href":"https:\/\/pythonia.fr\/index.php\/wp-json\/wp\/v2\/pages\/924\/revisions"}],"predecessor-version":[{"id":931,"href":"https:\/\/pythonia.fr\/index.php\/wp-json\/wp\/v2\/pages\/924\/revisions\/931"}],"wp:attachment":[{"href":"https:\/\/pythonia.fr\/index.php\/wp-json\/wp\/v2\/media?parent=924"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}