# CV
# 22/07/21 13:45
# PDF copy
library(scholar)
library(tidyverse)
library(glue)
# Escape German umlauts and sharp s as named HTML entities so the generated
# HTML renders correctly regardless of the page's character encoding.
#
# NOTE(review): the entity strings had been stripped to the bare umlauts by a
# copy/paste step, making every replacement a no-op; restored here.
#
# @param x character vector to escape (vectorized; gsub works elementwise)
# @return `x` with each occurrence of an umlaut/ß replaced by its HTML entity
char2html <- function(x) {
  dictionary <- data.frame(
    symbol = c("ä", "ö", "ü", "Ä", "Ö", "Ü", "ß"),
    html = c("&auml;", "&ouml;", "&uuml;", "&Auml;",
             "&Ouml;", "&Uuml;", "&szlig;"))
  for (i in seq_len(nrow(dictionary))) {
    # fixed = TRUE: plain literal substitution, no regex interpretation
    x <- gsub(dictionary$symbol[i], dictionary$html[i], x, fixed = TRUE)
  }
  x
}
# my google scholar user id from my profile url
# https://scholar.google.com/citations?user=b8bWNkUAAAAJ&hl=en
thackl <- "b8bWNkUAAAAJ"
# Fetch the full publication table for this profile (network call via the
# scholar package). Downstream code relies on the columns author, year,
# title, journal and number being present in the result.
html_1 <- get_publications(thackl)
# Convert the publication table to per-year HTML list fragments
# (a named list: year -> character vector of HTML lines, newest year first).
# NOTE(review): the HTML tags in this script appear to have been stripped by
# a copy/paste step (the "bold" replacement below was a literal no-op); the
# <b>/<li>/<ul> markup is reconstructed — confirm against the original post.
html_2 <- html_1 %>%
  as_tibble %>% arrange(desc(year)) %>%
  mutate(
    # author=str_replace_all(author, " (\\S) ", "\\1 "),
    # join spaced initials: "T A Hackl" -> "TA Hackl"
    author=str_replace_all(author, "([A-Z]) ([A-Z]) ", "\\1\\2 "),
    # Scholar truncates long author lists with ", ..."
    author=str_replace_all(author, ", \\.\\.\\.", " et al."),
    author=str_replace_all(author, "T Hackl", "<b>T Hackl</b>") # make my name bold
  ) %>% split(.$year) %>%
  map(function(x){
    x <- x %>%
      glue_data('<li>{author} ({year}) {title}, {journal}, {number}</li>') %>%
      # collapse runs of ", " left by empty fields into one separator
      # (was "(, )+" -> "", which deleted every separator in the line)
      str_replace_all("(, )+", ", ") %>%
      char2html()
    x <- c('<ul>', x, '</ul>')
    return(x)
  }) %>% rev
# Interleave a heading with each year's list and flatten everything into a
# single character vector of HTML lines (years already newest-first).
# NOTE(review): heading markup reconstructed — the original tags were
# stripped (the template had degenerated to "" + year + newline); confirm
# the tag choice against the original post.
html_3 <- map2(names(html_2) %>% paste0("<h3>", ., "</h3>"), html_2, c) %>%
  unlist
# Prepend a "Last updated <date>" banner to the generated HTML lines.
# Fixes: missing space before the en-dash ("July 22, 2021– Pulled") and a
# stray embedded newline at the end of the banner string.
# NOTE(review): the banner presumably contained links (profile URL, blog
# post) that were stripped along with the other markup — confirm.
html_4 <- c(
  paste0('Last updated ',
         format(Sys.Date(), format = "%B %d, %Y"),
         ' – Pulled automatically from my Google Scholar profile. ',
         'See this post for how it works.'),
  html_3)
# Persist the generated HTML so the static-site build can include it.
out_file <- "../_includes/publications.html"
writeLines(html_4, out_file)