diff --git a/assets/js/dashboard/api.js b/assets/js/dashboard/api.js
index ddbb90f79888..6722c52d9049 100644
--- a/assets/js/dashboard/api.js
+++ b/assets/js/dashboard/api.js
@@ -42,6 +42,7 @@ export function serializeQuery(query, extraQuery=[]) {
if (query.from) { queryObj.from = formatISO(query.from) }
if (query.to) { queryObj.to = formatISO(query.to) }
if (query.filters) { queryObj.filters = serializeFilters(query.filters) }
+ if (query.with_imported) { queryObj.with_imported = query.with_imported }
if (SHARED_LINK_AUTH) { queryObj.auth = SHARED_LINK_AUTH }
Object.assign(queryObj, ...extraQuery)
diff --git a/assets/js/dashboard/query.js b/assets/js/dashboard/query.js
index 366d00f263bc..6d68bd2dfbd3 100644
--- a/assets/js/dashboard/query.js
+++ b/assets/js/dashboard/query.js
@@ -23,6 +23,7 @@ export function parseQuery(querystring, site) {
date: q.get('date') ? parseUTCDate(q.get('date')) : nowForSite(site),
from: q.get('from') ? parseUTCDate(q.get('from')) : undefined,
to: q.get('to') ? parseUTCDate(q.get('to')) : undefined,
+ with_imported: q.get('with_imported') ? q.get('with_imported') === 'true' : true,
filters: {
'goal': q.get('goal'),
'props': JSON.parse(q.get('props')),
diff --git a/assets/js/dashboard/stats/visitor-graph.js b/assets/js/dashboard/stats/visitor-graph.js
index fda5453592b6..d6119f43e510 100644
--- a/assets/js/dashboard/stats/visitor-graph.js
+++ b/assets/js/dashboard/stats/visitor-graph.js
@@ -1,10 +1,11 @@
import React from 'react';
-import { withRouter } from 'react-router-dom'
+import { withRouter, Link } from 'react-router-dom'
import Chart from 'chart.js/auto';
import { navigateToQuery } from '../query'
import numberFormatter, {durationFormatter} from '../util/number-formatter'
import * as api from '../api'
import LazyLoader from '../components/lazy-loader'
+import * as url from '../util/url'
function buildDataSet(plot, present_index, ctx, label) {
var gradient = ctx.createLinearGradient(0, 0, 0, 300);
@@ -316,17 +317,19 @@ class LineGraph extends React.Component {
if (this.state.exported) {
return (
-
-
-
-
+
)
} else {
const endpoint = `/${encodeURIComponent(this.props.site.domain)}/export${api.serializeQuery(this.props.query)}`
return (
-
-
+
+
)
}
@@ -338,8 +341,8 @@ class LineGraph extends React.Component {
if (samplePercent < 100) {
return (
-
-
+
@@ -347,6 +350,27 @@ class LineGraph extends React.Component {
}
}
+ importedNotice() {
+ const source = this.props.graphData.imported_source;
+
+ if (source) {
+ const withImported = this.props.graphData.with_imported;
+ const strike = withImported ? "" : " line-through"
+ const target = url.setQuery('with_imported', !withImported)
+ const tip = withImported ? "" : "do not ";
+
+ return (
+
+
+
+ { source[0].toUpperCase() }
+
+
+
+ )
+ }
+ }
+
render() {
const extraClass = this.props.graphData.interval === 'hour' ? '' : 'cursor-pointer'
@@ -356,8 +380,11 @@ class LineGraph extends React.Component {
{ this.renderTopStats() }
- { this.downloadLink() }
- { this.samplingNotice() }
+
+ { this.downloadLink() }
+ { this.samplingNotice() }
+ { this.importedNotice() }
+
diff --git a/config/.env.dev b/config/.env.dev
index d224a66b0133..787a5cd69d42 100644
--- a/config/.env.dev
+++ b/config/.env.dev
@@ -12,4 +12,7 @@ SHOW_CITIES=true
PADDLE_VENDOR_AUTH_CODE=895e20d4efaec0575bb857f44b183217b332d9592e76e69b8a
PADDLE_VENDOR_ID=3942
+# Google OAuth credentials for local development. Use your own values
+# from the Google Cloud console — real client secrets must never be
+# committed to version control (any previously committed credentials
+# should be revoked and rotated).
+GOOGLE_CLIENT_ID=<your-google-oauth-client-id>
+GOOGLE_CLIENT_SECRET=<your-google-oauth-client-secret>
+
IP_GEOLOCATION_DB=/home/ukutaht/plausible/analytics/city_database.mmdb
diff --git a/config/runtime.exs b/config/runtime.exs
index 91a80a502568..bf9ce9084e8c 100644
--- a/config/runtime.exs
+++ b/config/runtime.exs
@@ -320,7 +320,8 @@ if config_env() == :prod && !disable_cron do
check_stats_emails: 1,
site_setup_emails: 1,
clean_email_verification_codes: 1,
- clean_invitations: 1
+ clean_invitations: 1,
+ google_analytics_imports: 1
]
extra_queues = [
@@ -340,7 +341,7 @@ if config_env() == :prod && !disable_cron do
else
config :plausible, Oban,
repo: Plausible.Repo,
- queues: false,
+ queues: [google_analytics_imports: 1],
plugins: false
end
diff --git a/lib/plausible/clickhouse_repo.ex b/lib/plausible/clickhouse_repo.ex
index 6304c49501f4..b77ef717f7d9 100644
--- a/lib/plausible/clickhouse_repo.ex
+++ b/lib/plausible/clickhouse_repo.ex
@@ -17,4 +17,22 @@ defmodule Plausible.ClickhouseRepo do
Ecto.Adapters.SQL.query!(__MODULE__, events_sql, [domain])
Ecto.Adapters.SQL.query!(__MODULE__, sessions_sql, [domain])
end
+
+  @doc """
+  Deletes all imported stats for a site from every `imported_*`
+  ClickHouse table.
+
+  `site_id` must be the site's integer id (the `site_id` column of the
+  imported tables), not the domain string.
+  """
+  def clear_imported_stats_for(site_id) do
+    # The table names come from this fixed list, so interpolating them
+    # into the statement is safe; the site id is still passed as a
+    # bound parameter.
+    [
+      "imported_visitors",
+      "imported_sources",
+      "imported_pages",
+      "imported_entry_pages",
+      "imported_exit_pages",
+      "imported_locations",
+      "imported_devices",
+      "imported_browsers",
+      "imported_operating_systems"
+    ]
+    |> Enum.each(fn table ->
+      # Enum.each (not map): this runs purely for its side effect and
+      # no caller uses the per-table query results.
+      sql = "ALTER TABLE #{table} DELETE WHERE site_id = ?"
+      Ecto.Adapters.SQL.query!(__MODULE__, sql, [site_id])
+    end)
+  end
end
diff --git a/lib/plausible/google/api.ex b/lib/plausible/google/api.ex
index db9a37bd1455..44372da25048 100644
--- a/lib/plausible/google/api.ex
+++ b/lib/plausible/google/api.ex
@@ -1,12 +1,18 @@
defmodule Plausible.Google.Api do
- @scope URI.encode_www_form("https://www.googleapis.com/auth/webmasters.readonly email")
+ alias Plausible.Imported
+ use Timex
+
+ @scope URI.encode_www_form(
+ "https://www.googleapis.com/auth/webmasters.readonly email https://www.googleapis.com/auth/analytics.readonly"
+ )
@verified_permission_levels ["siteOwner", "siteFullUser", "siteRestrictedUser"]
- def authorize_url(site_id) do
+ def authorize_url(site_id, redirect_to) do
if Application.get_env(:plausible, :environment) == "test" do
""
else
- "https://accounts.google.com/o/oauth2/v2/auth?client_id=#{client_id()}&redirect_uri=#{redirect_uri()}&prompt=consent&response_type=code&access_type=offline&scope=#{@scope}&state=#{site_id}"
+ "https://accounts.google.com/o/oauth2/v2/auth?client_id=#{client_id()}&redirect_uri=#{redirect_uri()}&prompt=consent&response_type=code&access_type=offline&scope=#{@scope}&state=" <>
+ Jason.encode!([site_id, redirect_to])
end
end
@@ -113,6 +119,226 @@ defmodule Plausible.Google.Api do
end
end
+  # Lists the Google Analytics views (profiles) readable by the site's
+  # stored Google auth, refreshing the OAuth token first when it is
+  # close to expiry. Propagates the error tuple from
+  # `refresh_if_needed/1` unchanged on refresh failure.
+  def get_analytics_view_ids(site) do
+    with {:ok, auth} <- refresh_if_needed(site.google_auth) do
+      do_get_analytics_view_ids(auth)
+    end
+  end
+
+  # Returns {:ok, %{"host - view name" => view_id}} on HTTP 200, or
+  # {:error, raw_response_body} otherwise (also reported to Sentry).
+  def do_get_analytics_view_ids(auth) do
+    res =
+      HTTPoison.get!(
+        "https://www.googleapis.com/analytics/v3/management/accounts/~all/webproperties/~all/profiles",
+        Authorization: "Bearer #{auth.access_token}"
+      )
+
+    case res.status_code do
+      200 ->
+        # Key each view by "host - name" so the UI can show a
+        # human-readable label while keeping the id as the value.
+        profiles =
+          Jason.decode!(res.body)
+          |> Map.get("items")
+          |> Enum.map(fn item ->
+            uri = URI.parse(Map.get(item, "websiteUrl"))
+            name = Map.get(item, "name")
+            {"#{uri.host} - #{name}", Map.get(item, "id")}
+          end)
+          |> Map.new()
+
+        {:ok, profiles}
+
+      _ ->
+        Sentry.capture_message("Error fetching Google view ID", extra: Jason.decode!(res.body))
+        {:error, res.body}
+    end
+  end
+
+  # Imports historical stats for `site` from the given Google Analytics
+  # view (`profile`), refreshing the stored OAuth token first if it is
+  # about to expire. Returns the error tuple from `refresh_if_needed/1`
+  # unchanged when the token refresh fails.
+  def import_analytics(site, profile) do
+    with {:ok, auth} <- refresh_if_needed(site.google_auth) do
+      do_import_analytics(site, auth, profile)
+    end
+  end
+
+ @doc """
+ API reference:
+ https://developers.google.com/analytics/devguides/reporting/core/v4/rest/v4/reports/batchGet#ReportRequest
+
+ Dimensions reference: https://ga-dev-tools.web.app/dimensions-metrics-explorer
+ """
+  def do_import_analytics(site, auth, profile) do
+    # Import only up to the first natively-recorded pageview so the
+    # imported range does not overlap native stats.
+    end_date =
+      Plausible.Stats.Clickhouse.pageviews_begin(site)
+      |> NaiveDateTime.to_date()
+
+    # The epoch date means the site has no native pageviews yet;
+    # import everything up to today in that case.
+    end_date =
+      if end_date == ~D[1970-01-01] do
+        Timex.today()
+      else
+        end_date
+      end
+
+    request = %{
+      auth: auth,
+      profile: profile,
+      end_date: Date.to_iso8601(end_date)
+    }
+
+    # Each element is: {dataset, dimensions, metrics}
+    request_data = [
+      {
+        "imported_visitors",
+        ["ga:date"],
+        [
+          "ga:users",
+          "ga:pageviews",
+          "ga:bounces",
+          "ga:sessions",
+          "ga:sessionDuration"
+        ]
+      },
+      {
+        "imported_sources",
+        ["ga:date", "ga:source", "ga:medium", "ga:campaign", "ga:adContent", "ga:keyword"],
+        ["ga:users", "ga:sessions", "ga:bounces", "ga:sessionDuration"]
+      },
+      {
+        "imported_pages",
+        ["ga:date", "ga:hostname", "ga:pagePath"],
+        ["ga:users", "ga:pageviews", "ga:exits", "ga:timeOnPage"]
+      },
+      {
+        "imported_entry_pages",
+        ["ga:date", "ga:landingPagePath"],
+        ["ga:users", "ga:entrances", "ga:sessionDuration", "ga:bounces"]
+      },
+      {
+        "imported_exit_pages",
+        ["ga:date", "ga:exitPagePath"],
+        ["ga:users", "ga:exits"]
+      },
+      {
+        "imported_locations",
+        ["ga:date", "ga:countryIsoCode", "ga:regionIsoCode"],
+        ["ga:users", "ga:sessions", "ga:bounces", "ga:sessionDuration"]
+      },
+      {
+        "imported_devices",
+        ["ga:date", "ga:deviceCategory"],
+        ["ga:users", "ga:sessions", "ga:bounces", "ga:sessionDuration"]
+      },
+      {
+        "imported_browsers",
+        ["ga:date", "ga:browser"],
+        ["ga:users", "ga:sessions", "ga:bounces", "ga:sessionDuration"]
+      },
+      {
+        "imported_operating_systems",
+        ["ga:date", "ga:operatingSystem"],
+        ["ga:users", "ga:sessions", "ga:bounces", "ga:sessionDuration"]
+      }
+    ]
+
+    responses =
+      Enum.map(
+        request_data,
+        fn {dataset, dimensions, metrics} ->
+          fetch_analytic_reports(dataset, dimensions, metrics, request)
+        end
+      )
+
+    # `responses` is a list of {:ok, %{dataset => rows}} / {:error, msg}
+    # tuples, so Keyword.get/2 finds the first fetch error, if any.
+    case Keyword.get(responses, :error) do
+      nil ->
+        results =
+          responses
+          |> Enum.map(fn {:ok, resp} -> resp end)
+          |> Enum.concat()
+
+        # Any dataset with a non-nil row list counts as "has data".
+        if Enum.any?(results, fn {_, val} -> val end) do
+          maybe_error =
+            results
+            |> Enum.map(fn {dataset, data} ->
+              Imported.from_google_analytics(data, site.id, dataset)
+            end)
+            |> Keyword.get(:error)
+
+          case maybe_error do
+            nil ->
+              {:ok, nil}
+
+            # Keyword.get/2 already unwrapped the {:error, value} tuple,
+            # so match the bare value here (matching {:error, _} again
+            # could never succeed and would raise CaseClauseError).
+            error ->
+              # Roll back partially-imported data. Must pass the integer
+              # site id — the imported_* tables key on site_id, not the
+              # domain string.
+              Plausible.ClickhouseRepo.clear_imported_stats_for(site.id)
+
+              Sentry.capture_message("Error saving Google analytics data", extra: error)
+              # NOTE(review): assumes the error payload is the decoded
+              # API error map — confirm from_google_analytics's error
+              # shape.
+              {:error, error["error"]["message"]}
+          end
+        else
+          {:error, "No Google Analytics data found."}
+        end
+
+      error ->
+        Sentry.capture_message("Error fetching Google analytics data", extra: error)
+        {:error, error}
+    end
+  end
+
+  # Fetches one GA Reporting API v4 report (`dataset`), following
+  # `nextPageToken` recursively and concatenating row pages.
+  # Returns {:ok, %{dataset => rows}} or {:error, message}.
+  defp fetch_analytic_reports(dataset, dimensions, metrics, request, page_token \\ "") do
+    report = %{
+      viewId: request.profile,
+      dateRanges: [
+        %{
+          # The earliest valid date
+          startDate: "2005-01-01",
+          endDate: request.end_date
+        }
+      ],
+      dimensions: Enum.map(dimensions, &%{name: &1, histogramBuckets: []}),
+      metrics: Enum.map(metrics, &%{expression: &1}),
+      hideTotals: true,
+      hideValueRanges: true,
+      orderBys: [
+        %{
+          fieldName: "ga:date",
+          sortOrder: "DESCENDING"
+        }
+      ],
+      # NOTE(review): 100_00 parses as 10_000. If ten thousand rows per
+      # page is the intent, write it 10_000; if the API maximum
+      # (100_000) was meant, a zero is missing — confirm and fix the
+      # digit grouping either way.
+      pageSize: 100_00,
+      pageToken: page_token
+    }
+
+    res =
+      HTTPoison.post!(
+        "https://analyticsreporting.googleapis.com/v4/reports:batchGet",
+        Jason.encode!(%{reportRequests: [report]}),
+        Authorization: "Bearer #{request.auth.access_token}"
+      )
+
+    if res.status_code == 200 do
+      # batchGet wraps results in a "reports" list; only one request is
+      # sent, so only the first report matters.
+      report = List.first(Jason.decode!(res.body)["reports"])
+      data = report["data"]["rows"]
+      next_page_token = report["nextPageToken"]
+
+      if next_page_token do
+        # Recursively make more requests until we run out of next page tokens
+        case fetch_analytic_reports(
+               dataset,
+               dimensions,
+               metrics,
+               request,
+               next_page_token
+             ) do
+          {:ok, %{^dataset => remainder}} ->
+            {:ok, %{dataset => data ++ remainder}}
+
+          error ->
+            error
+        end
+      else
+        {:ok, %{dataset => data}}
+      end
+    else
+      {:error, Jason.decode!(res.body)["error"]["message"]}
+    end
+  end
+
defp refresh_if_needed(auth) do
if Timex.before?(auth.expires, Timex.now() |> Timex.shift(seconds: 30)) do
refresh_token(auth)
diff --git a/lib/plausible/imported/site.ex b/lib/plausible/imported/site.ex
new file mode 100644
index 000000000000..d137fb667656
--- /dev/null
+++ b/lib/plausible/imported/site.ex
@@ -0,0 +1,249 @@
+defmodule Plausible.Imported do
+ use Plausible.ClickhouseRepo
+ use Timex
+
+ def forget(site) do
+ Plausible.ClickhouseRepo.clear_imported_stats_for(site.id)
+ end
+
+ def from_google_analytics(nil, _site_id, _metric, _timezone), do: {:ok, nil}
+
+ def from_google_analytics(data, site_id, table) do
+ data =
+ Enum.map(data, fn row ->
+ new_from_google_analytics(site_id, table, row)
+ end)
+
+ case ClickhouseRepo.insert_all(table, data) do
+ {n_rows, _} when n_rows > 0 -> :ok
+ error -> error
+ end
+ end
+
+  # Builds an "imported_visitors" row from one GA report row: a single
+  # date dimension plus five aggregate metric values.
+  defp new_from_google_analytics(site_id, "imported_visitors", %{
+         "dimensions" => [date],
+         "metrics" => [%{"values" => values}]
+       }) do
+    # Metric values arrive as strings; keep only the integer part that
+    # Integer.parse extracts.
+    [visitors, pageviews, bounces, visits, visit_duration] =
+      values
+      |> Enum.map(&Integer.parse/1)
+      |> Enum.map(&elem(&1, 0))
+
+    %{
+      site_id: site_id,
+      date: format_date(date),
+      visitors: visitors,
+      pageviews: pageviews,
+      bounces: bounces,
+      visits: visits,
+      visit_duration: visit_duration
+    }
+  end
+
+ # Credit: https://github.com/kvesteri/validators
+ @domain ~r/^(([a-zA-Z]{1})|([a-zA-Z]{1}[a-zA-Z]{1})|([a-zA-Z]{1}[0-9]{1})|([0-9]{1}[a-zA-Z]{1})|([a-zA-Z0-9][-_.a-zA-Z0-9]{0,61}[a-zA-Z0-9]))\.([a-zA-Z]{2,13}|[a-zA-Z0-9-]{2,30}.[a-zA-Z]{2,3})$/
+
+  # Builds an "imported_sources" row from one GA report row.
+  defp new_from_google_analytics(site_id, "imported_sources", %{
+         "dimensions" => [date, source, medium, campaign, content, term],
+         "metrics" => [%{"values" => [visitors, visits, bounces, visit_duration]}]
+       }) do
+    {visitors, ""} = Integer.parse(visitors)
+    {visits, ""} = Integer.parse(visits)
+    {bounces, ""} = Integer.parse(bounces)
+    # Duration may carry a fractional part; keep the integer portion.
+    {visit_duration, _} = Integer.parse(visit_duration)
+
+    # "(direct)" means no referrer. Bare domains are normalised via
+    # parse_referrer/1 (e.g. "google" -> "Google").
+    source = if source == "(direct)", do: nil, else: source
+    source = if source && String.match?(source, @domain), do: parse_referrer(source), else: source
+
+    %{
+      site_id: site_id,
+      date: format_date(date),
+      # `source` is already normalised above — parsing it a second time
+      # would push already-mapped names (e.g. "Google") back through
+      # RefInspector and corrupt them.
+      source: source,
+      utm_medium: nil_if_missing(medium),
+      utm_campaign: nil_if_missing(campaign),
+      utm_content: nil_if_missing(content),
+      utm_term: nil_if_missing(term),
+      visitors: visitors,
+      visits: visits,
+      bounces: bounces,
+      visit_duration: visit_duration
+    }
+  end
+
+ defp new_from_google_analytics(site_id, "imported_pages", %{
+ "dimensions" => [date, hostname, page],
+ "metrics" => [%{"values" => [visitors, pageviews, exits, time_on_page]}]
+ }) do
+ page = URI.parse(page).path
+ {visitors, ""} = Integer.parse(visitors)
+ {pageviews, ""} = Integer.parse(pageviews)
+ {exits, ""} = Integer.parse(exits)
+ {time_on_page, _} = Integer.parse(time_on_page)
+
+ %{
+ site_id: site_id,
+ date: format_date(date),
+ hostname: String.replace_prefix(hostname, "www.", ""),
+ page: page,
+ visitors: visitors,
+ pageviews: pageviews,
+ exits: exits,
+ time_on_page: time_on_page
+ }
+ end
+
+ defp new_from_google_analytics(site_id, "imported_entry_pages", %{
+ "dimensions" => [date, entry_page],
+ "metrics" => [%{"values" => [visitors, entrances, visit_duration, bounces]}]
+ }) do
+ {visitors, ""} = Integer.parse(visitors)
+ {entrances, ""} = Integer.parse(entrances)
+ {bounces, ""} = Integer.parse(bounces)
+ {visit_duration, _} = Integer.parse(visit_duration)
+
+ %{
+ site_id: site_id,
+ date: format_date(date),
+ entry_page: entry_page,
+ visitors: visitors,
+ entrances: entrances,
+ visit_duration: visit_duration,
+ bounces: bounces
+ }
+ end
+
+ defp new_from_google_analytics(site_id, "imported_exit_pages", %{
+ "dimensions" => [date, exit_page],
+ "metrics" => [%{"values" => [visitors, exits]}]
+ }) do
+ {visitors, ""} = Integer.parse(visitors)
+ {exits, ""} = Integer.parse(exits)
+
+ %{
+ site_id: site_id,
+ date: format_date(date),
+ exit_page: exit_page,
+ visitors: visitors,
+ exits: exits
+ }
+ end
+
+ defp new_from_google_analytics(site_id, "imported_locations", %{
+ "dimensions" => [date, country, region],
+ "metrics" => [%{"values" => [visitors, visits, bounces, visit_duration]}]
+ }) do
+ country = if country == "(not set)", do: "", else: country
+ region = if region == "(not set)", do: "", else: region
+ {visitors, ""} = Integer.parse(visitors)
+ {visits, ""} = Integer.parse(visits)
+ {bounces, ""} = Integer.parse(bounces)
+ {visit_duration, _} = Integer.parse(visit_duration)
+
+ %{
+ site_id: site_id,
+ date: format_date(date),
+ country: country,
+ region: region,
+ city: 0,
+ visitors: visitors,
+ visits: visits,
+ bounces: bounces,
+ visit_duration: visit_duration
+ }
+ end
+
+ defp new_from_google_analytics(site_id, "imported_devices", %{
+ "dimensions" => [date, device],
+ "metrics" => [%{"values" => [visitors, visits, bounces, visit_duration]}]
+ }) do
+ {visitors, ""} = Integer.parse(visitors)
+ {visits, ""} = Integer.parse(visits)
+ {bounces, ""} = Integer.parse(bounces)
+ {visit_duration, _} = Integer.parse(visit_duration)
+
+ %{
+ site_id: site_id,
+ date: format_date(date),
+ device: String.capitalize(device),
+ visitors: visitors,
+ visits: visits,
+ bounces: bounces,
+ visit_duration: visit_duration
+ }
+ end
+
+ @browser_google_to_plausible %{
+ "User-Agent:Opera" => "Opera",
+ "Mozilla Compatible Agent" => "Mobile App",
+ "Android Webview" => "Mobile App",
+ "Android Browser" => "Mobile App",
+ "Safari (in-app)" => "Mobile App",
+ "User-Agent: Mozilla" => "Firefox",
+ "(not set)" => ""
+ }
+
+ defp new_from_google_analytics(site_id, "imported_browsers", %{
+ "dimensions" => [date, browser],
+ "metrics" => [%{"values" => [visitors, visits, bounces, visit_duration]}]
+ }) do
+ {visitors, ""} = Integer.parse(visitors)
+ {visits, ""} = Integer.parse(visits)
+ {bounces, ""} = Integer.parse(bounces)
+ {visit_duration, _} = Integer.parse(visit_duration)
+
+ %{
+ site_id: site_id,
+ date: format_date(date),
+ browser: Map.get(@browser_google_to_plausible, browser, browser),
+ visitors: visitors,
+ visits: visits,
+ bounces: bounces,
+ visit_duration: visit_duration
+ }
+ end
+
+ @os_google_to_plausible %{
+ "Macintosh" => "Mac",
+ "Linux" => "GNU/Linux",
+ "(not set)" => ""
+ }
+
+ defp new_from_google_analytics(site_id, "imported_operating_systems", %{
+ "dimensions" => [date, operating_system],
+ "metrics" => [%{"values" => [visitors, visits, bounces, visit_duration]}]
+ }) do
+ {visitors, ""} = Integer.parse(visitors)
+ {visits, ""} = Integer.parse(visits)
+ {bounces, ""} = Integer.parse(bounces)
+ {visit_duration, _} = Integer.parse(visit_duration)
+
+ %{
+ site_id: site_id,
+ date: format_date(date),
+ operating_system: Map.get(@os_google_to_plausible, operating_system, operating_system),
+ visitors: visitors,
+ visits: visits,
+ bounces: bounces,
+ visit_duration: visit_duration
+ }
+ end
+
+ defp format_date(date) do
+ Timex.parse!("#{date}", "%Y%m%d", :strftime)
+ |> NaiveDateTime.to_date()
+ end
+
+ @missing_values ["(none)", "(not set)", "(not provided)"]
+ def nil_if_missing(value) when value in @missing_values, do: nil
+ def nil_if_missing(value), do: value
+
+ def parse_referrer(nil), do: nil
+ def parse_referrer("google"), do: "Google"
+ def parse_referrer("bing"), do: "Bing"
+ def parse_referrer("duckduckgo"), do: "DuckDuckGo"
+
+ def parse_referrer(ref) do
+ RefInspector.parse("https://" <> ref)
+ |> PlausibleWeb.RefInspector.parse()
+ end
+end
diff --git a/lib/plausible/site/schema.ex b/lib/plausible/site/schema.ex
index f15f9ef76ff8..07de7266e627 100644
--- a/lib/plausible/site/schema.ex
+++ b/lib/plausible/site/schema.ex
@@ -1,3 +1,13 @@
+defmodule Plausible.Site.ImportedData do
+  @moduledoc """
+  Embedded schema tracking a third-party stats import for a site:
+  the `source` it came from, the `end_date` of the import window, and
+  the current `status` ("importing", "ok" or "error" — set by
+  `Plausible.Site.start_import/3`, `import_success/1` and
+  `import_failure/1`).
+  """
+  use Ecto.Schema
+
+  embedded_schema do
+    # Set to the import start date by start_import/3 — presumably the
+    # last day covered by the imported data; confirm against callers.
+    field :end_date, :date
+    # Human-readable name of the import source
+    field :source, :string
+    # "importing" | "ok" | "error"
+    field :status, :string
+  end
+end
+
defmodule Plausible.Site do
use Ecto.Schema
import Ecto.Changeset
@@ -12,6 +22,8 @@ defmodule Plausible.Site do
field :locked, :boolean
field :has_stats, :boolean
+ embeds_one :imported_data, Plausible.Site.ImportedData, on_replace: :update
+
many_to_many :members, User, join_through: Plausible.Site.Membership
has_many :memberships, Plausible.Site.Membership
has_many :invitations, Plausible.Auth.Invitation
@@ -47,6 +59,37 @@ defmodule Plausible.Site do
change(site, has_stats: has_stats_val)
end
+  # Records that an import from `imported_source` has begun, stamping
+  # today's date as the end of the import window.
+  def start_import(site, imported_source, status \\ "importing") do
+    change(site,
+      imported_data: %Plausible.Site.ImportedData{
+        end_date: Timex.today(),
+        source: imported_source,
+        status: status
+      }
+    )
+  end
+
+  # Flips the embedded import status to "ok".
+  # NOTE(review): passing a bare map for an embed to change/2 may not
+  # cast as intended — confirm whether put_embed/cast_embed is needed
+  # for the `on_replace: :update` embed.
+  def import_success(site) do
+    change(site, imported_data: %{status: "ok"})
+  end
+
+  # Flips the embedded import status to "error".
+  # NOTE(review): same change/2-with-a-map concern as import_success/1.
+  def import_failure(site) do
+    change(site, imported_data: %{status: "error"})
+  end
+
+  # Sets the import source and end date without setting a status.
+  def set_imported_source(site, imported_source) do
+    change(site,
+      imported_data: %Plausible.Site.ImportedData{
+        end_date: Timex.today(),
+        source: imported_source
+      }
+    )
+  end
+
+  # Clears the embedded import metadata entirely.
+  def remove_imported_data(site) do
+    change(site, imported_data: nil)
+  end
+
defp clean_domain(changeset) do
clean_domain =
(get_field(changeset, :domain) || "")
diff --git a/lib/plausible/stats/aggregate.ex b/lib/plausible/stats/aggregate.ex
index 1880c6278243..dba5023d0cb0 100644
--- a/lib/plausible/stats/aggregate.ex
+++ b/lib/plausible/stats/aggregate.ex
@@ -1,10 +1,10 @@
defmodule Plausible.Stats.Aggregate do
alias Plausible.Stats.Query
use Plausible.ClickhouseRepo
- import Plausible.Stats.Base
+ import Plausible.Stats.{Base, Imported}
- @event_metrics ["visitors", "pageviews", "events", "sample_percent"]
- @session_metrics ["visits", "bounce_rate", "visit_duration", "sample_percent"]
+ @event_metrics [:visitors, :pageviews, :events, :sample_percent]
+ @session_metrics [:visits, :bounce_rate, :visit_duration, :sample_percent]
def aggregate(site, query, metrics) do
event_metrics = Enum.filter(metrics, &(&1 in @event_metrics))
@@ -13,7 +13,7 @@ defmodule Plausible.Stats.Aggregate do
session_task = Task.async(fn -> aggregate_sessions(site, query, session_metrics) end)
time_on_page_task =
- if "time_on_page" in metrics do
+ if :time_on_page in metrics do
Task.async(fn -> aggregate_time_on_page(site, query) end)
else
Task.async(fn -> %{} end)
@@ -23,7 +23,7 @@ defmodule Plausible.Stats.Aggregate do
|> Map.merge(Task.await(event_task, 10_000))
|> Map.merge(Task.await(time_on_page_task, 10_000))
|> Enum.map(fn {metric, value} ->
- {metric, %{"value" => round(value || 0)}}
+ {metric, %{value: round(value || 0)}}
end)
|> Enum.into(%{})
end
@@ -33,6 +33,7 @@ defmodule Plausible.Stats.Aggregate do
defp aggregate_events(site, query, metrics) do
from(e in base_event_query(site, query), select: %{})
|> select_event_metrics(metrics)
+ |> merge_imported(site, query, :aggregate, metrics)
|> ClickhouseRepo.one()
end
@@ -44,6 +45,7 @@ defmodule Plausible.Stats.Aggregate do
from(e in query_sessions(site, query), select: %{})
|> filter_converted_sessions(site, query)
|> select_session_metrics(metrics)
+ |> merge_imported(site, query, :aggregate, metrics)
|> ClickhouseRepo.one()
end
@@ -106,6 +108,6 @@ defmodule Plausible.Stats.Aggregate do
{:ok, res} = ClickhouseRepo.query(time_query, base_query_raw_params ++ [where_arg])
[[time_on_page]] = res.rows
- %{"time_on_page" => time_on_page}
+ %{time_on_page: time_on_page}
end
end
diff --git a/lib/plausible/stats/base.ex b/lib/plausible/stats/base.ex
index 9d356f449cc2..5452ada74848 100644
--- a/lib/plausible/stats/base.ex
+++ b/lib/plausible/stats/base.ex
@@ -1,6 +1,7 @@
defmodule Plausible.Stats.Base do
use Plausible.ClickhouseRepo
alias Plausible.Stats.{Query, Filters}
+ import Ecto.Query
@no_ref "Direct / None"
@@ -196,36 +197,36 @@ defmodule Plausible.Stats.Base do
def select_event_metrics(q, []), do: q
- def select_event_metrics(q, ["pageviews" | rest]) do
+ def select_event_metrics(q, [:pageviews | rest]) do
from(e in q,
select_merge: %{
- "pageviews" =>
+ pageviews:
fragment("toUInt64(round(countIf(? = 'pageview') * any(_sample_factor)))", e.name)
}
)
|> select_event_metrics(rest)
end
- def select_event_metrics(q, ["events" | rest]) do
+ def select_event_metrics(q, [:events | rest]) do
from(e in q,
- select_merge: %{"events" => fragment("toUInt64(round(count(*) * any(_sample_factor)))")}
+ select_merge: %{events: fragment("toUInt64(round(count(*) * any(_sample_factor)))")}
)
|> select_event_metrics(rest)
end
- def select_event_metrics(q, ["visitors" | rest]) do
+ def select_event_metrics(q, [:visitors | rest]) do
from(e in q,
select_merge: %{
- "visitors" => fragment("toUInt64(round(uniq(?) * any(_sample_factor)))", e.user_id)
+ visitors: fragment("toUInt64(round(uniq(?) * any(_sample_factor)))", e.user_id)
}
)
|> select_event_metrics(rest)
end
- def select_event_metrics(q, ["sample_percent" | rest]) do
+ def select_event_metrics(q, [:sample_percent | rest]) do
from(e in q,
select_merge: %{
- "sample_percent" =>
+ sample_percent:
fragment("if(any(_sample_factor) > 1, round(100 / any(_sample_factor)), 100)")
}
)
@@ -236,58 +237,59 @@ defmodule Plausible.Stats.Base do
def select_session_metrics(q, []), do: q
- def select_session_metrics(q, ["bounce_rate" | rest]) do
+ def select_session_metrics(q, [:bounce_rate | rest]) do
from(s in q,
select_merge: %{
- "bounce_rate" =>
- fragment("toUInt32(ifNotFinite(round(sum(is_bounce * sign) / sum(sign) * 100), 0))")
+ bounce_rate:
+ fragment("toUInt32(ifNotFinite(round(sum(is_bounce * sign) / sum(sign) * 100), 0))"),
+ visits: fragment("toUInt32(sum(sign))")
}
)
|> select_session_metrics(rest)
end
- def select_session_metrics(q, ["visits" | rest]) do
+ def select_session_metrics(q, [:visits | rest]) do
from(s in q,
select_merge: %{
- "visits" => fragment("toUInt64(round(uniq(?) * any(_sample_factor)))", s.session_id)
+ visits: fragment("toUInt64(round(uniq(?) * any(_sample_factor)))", s.session_id)
}
)
|> select_session_metrics(rest)
end
- def select_session_metrics(q, ["pageviews" | rest]) do
+ def select_session_metrics(q, [:pageviews | rest]) do
from(s in q,
select_merge: %{
- "pageviews" =>
+ pageviews:
fragment("toUInt64(round(sum(? * ?) * any(_sample_factor)))", s.sign, s.pageviews)
}
)
|> select_session_metrics(rest)
end
- def select_session_metrics(q, ["visitors" | rest]) do
+ def select_session_metrics(q, [:visitors | rest]) do
from(s in q,
select_merge: %{
- "visitors" => fragment("toUInt64(round(uniq(?) * any(_sample_factor)))", s.user_id)
+ visitors: fragment("toUInt64(round(uniq(?) * any(_sample_factor)))", s.user_id)
}
)
|> select_session_metrics(rest)
end
-  def select_session_metrics(q, ["visit_duration" | rest]) do
+  def select_session_metrics(q, [:visit_duration | rest]) do
     from(s in q,
       select_merge: %{
-        "visit_duration" =>
+        visit_duration:
           fragment("toUInt32(ifNotFinite(round(sum(duration * sign) / sum(sign)), 0))")
       }
     )
     |> select_session_metrics(rest)
   end
- def select_session_metrics(q, ["sample_percent" | rest]) do
+ def select_session_metrics(q, [:sample_percent | rest]) do
from(e in q,
select_merge: %{
- "sample_percent" =>
+ sample_percent:
fragment("if(any(_sample_factor) > 1, round(100 / any(_sample_factor)), 100)")
}
)
@@ -319,21 +321,21 @@ defmodule Plausible.Stats.Base do
defp db_prop_val(:utm_term, @no_ref), do: ""
defp db_prop_val(_, val), do: val
- defp utc_boundaries(%Query{period: "realtime"}, _timezone) do
+ def utc_boundaries(%Query{period: "realtime"}, _timezone) do
last_datetime = NaiveDateTime.utc_now() |> Timex.shift(seconds: 5)
first_datetime = NaiveDateTime.utc_now() |> Timex.shift(minutes: -5)
{first_datetime, last_datetime}
end
- defp utc_boundaries(%Query{period: "30m"}, _timezone) do
+ def utc_boundaries(%Query{period: "30m"}, _timezone) do
last_datetime = NaiveDateTime.utc_now() |> Timex.shift(seconds: 5)
first_datetime = NaiveDateTime.utc_now() |> Timex.shift(minutes: -30)
{first_datetime, last_datetime}
end
- defp utc_boundaries(%Query{date_range: date_range}, timezone) do
+ def utc_boundaries(%Query{date_range: date_range}, timezone) do
{:ok, first} = NaiveDateTime.new(date_range.first, ~T[00:00:00])
first_datetime =
diff --git a/lib/plausible/stats/breakdown.ex b/lib/plausible/stats/breakdown.ex
index 72eb1e651f42..ccef6a49a72d 100644
--- a/lib/plausible/stats/breakdown.ex
+++ b/lib/plausible/stats/breakdown.ex
@@ -1,11 +1,11 @@
defmodule Plausible.Stats.Breakdown do
use Plausible.ClickhouseRepo
- import Plausible.Stats.Base
+ import Plausible.Stats.{Base, Imported}
alias Plausible.Stats.Query
@no_ref "Direct / None"
- @event_metrics ["visitors", "pageviews", "events"]
- @session_metrics ["visits", "bounce_rate", "visit_duration"]
+ @event_metrics [:visitors, :pageviews, :events]
+ @session_metrics [:visits, :bounce_rate, :visit_duration]
@event_props ["event:page", "event:page_match", "event:name"]
def breakdown(site, query, "event:goal", metrics, pagination) do
@@ -19,7 +19,7 @@ defmodule Plausible.Stats.Breakdown do
event_results =
if Enum.any?(event_goals) do
breakdown(site, event_query, "event:name", metrics, pagination)
- |> transform_keys(%{"name" => "goal"})
+ |> transform_keys(%{name: :goal})
else
[]
end
@@ -44,18 +44,18 @@ defmodule Plausible.Stats.Breakdown do
),
group_by: fragment("index"),
select: %{
- "index" => fragment("arrayJoin(indices) as index"),
- "goal" => fragment("concat('Visit ', array(?)[index])", ^page_exprs)
+ index: fragment("arrayJoin(indices) as index"),
+ goal: fragment("concat('Visit ', array(?)[index])", ^page_exprs)
}
)
|> select_event_metrics(metrics)
|> ClickhouseRepo.all()
- |> Enum.map(fn row -> Map.delete(row, "index") end)
+ |> Enum.map(fn row -> Map.delete(row, :index) end)
else
[]
end
- zip_results(event_results, page_results, "event:goal", metrics)
+ zip_results(event_results, page_results, :goal, metrics)
end
def breakdown(site, query, "event:props:" <> custom_prop, metrics, pagination) do
@@ -79,6 +79,7 @@ defmodule Plausible.Stats.Breakdown do
end
results = breakdown_events(site, query, "event:props:" <> custom_prop, metrics, pagination)
+
zipped = zip_results(none_result, results, custom_prop, metrics)
if Enum.find_index(zipped, fn value -> value[custom_prop] == "(none)" end) == limit do
@@ -95,12 +96,12 @@ defmodule Plausible.Stats.Breakdown do
event_result = breakdown_events(site, query, "event:page", event_metrics, pagination)
event_result =
- if "time_on_page" in metrics do
- pages = Enum.map(event_result, & &1["page"])
+ if :time_on_page in metrics do
+ pages = Enum.map(event_result, & &1[:page])
time_on_page_result = breakdown_time_on_page(site, query, pages)
Enum.map(event_result, fn row ->
- Map.put(row, "time_on_page", time_on_page_result[row["page"]])
+ Map.put(row, :time_on_page, time_on_page_result[row[:page]])
end)
else
event_result
@@ -112,21 +113,24 @@ defmodule Plausible.Stats.Breakdown do
query
pages ->
- Query.put_filter(query, "visit:entry_page", {:member, Enum.map(pages, & &1["page"])})
+ Query.put_filter(query, "visit:entry_page", {:member, Enum.map(pages, & &1[:page])})
end
{limit, _page} = pagination
session_result =
breakdown_sessions(site, new_query, "visit:entry_page", session_metrics, {limit, 1})
- |> transform_keys(%{"entry_page" => "page"})
+ |> transform_keys(%{entry_page: :page})
+
+ metrics = metrics ++ [:page]
zip_results(
event_result,
session_result,
- "event:page",
+ :page,
metrics
)
+ |> Enum.map(&Map.take(&1, metrics))
end
def breakdown(site, query, property, metrics, pagination) when property in @event_props do
@@ -143,7 +147,16 @@ defmodule Plausible.Stats.Breakdown do
"visit:utm_term"
] do
query = Query.treat_page_filter_as_entry_page(query)
- breakdown_sessions(site, query, property, metrics, pagination)
+
+ # "visits" is fetched when querying bounce rate and visit duration, as it
+ # is needed to calculate these from imported data. Let's remove it from the
+ # result if it wasn't requested.
+ if (:bounce_rate in metrics or :visit_duration in metrics) and :visits not in metrics do
+ breakdown_sessions(site, query, property, metrics, pagination)
+ |> Enum.map(&Map.delete(&1, :visits))
+ else
+ breakdown_sessions(site, query, property, metrics, pagination)
+ end
end
def breakdown(site, query, property, metrics, pagination) do
@@ -151,13 +164,17 @@ defmodule Plausible.Stats.Breakdown do
end
defp zip_results(event_result, session_result, property, metrics) do
- sort_by = if Enum.member?(metrics, "visitors"), do: "visitors", else: List.first(metrics)
+ sort_by = if Enum.member?(metrics, :visitors), do: :visitors, else: List.first(metrics)
property =
- property
- |> String.trim_leading("event:")
- |> String.trim_leading("visit:")
- |> String.trim_leading("props:")
+ if is_binary(property) do
+ property
+ |> String.trim_leading("event:")
+ |> String.trim_leading("visit:")
+ |> String.trim_leading("props:")
+ else
+ property
+ end
null_row = Enum.map(metrics, fn metric -> {metric, nil} end) |> Enum.into(%{})
@@ -177,35 +194,33 @@ defmodule Plausible.Stats.Breakdown do
defp breakdown_sessions(_, _, _, [], _), do: []
- defp breakdown_sessions(site, query, property, metrics, {limit, page}) do
- offset = (page - 1) * limit
-
+ defp breakdown_sessions(site, query, property, metrics, pagination) do
from(s in query_sessions(site, query),
order_by: [desc: fragment("uniq(?)", s.user_id), asc: fragment("min(?)", s.start)],
- limit: ^limit,
- offset: ^offset,
select: %{}
)
|> filter_converted_sessions(site, query)
|> do_group_by(property)
|> select_session_metrics(metrics)
+ |> merge_imported(site, query, property, metrics)
+ |> apply_pagination(pagination)
|> ClickhouseRepo.all()
+ |> transform_keys(%{operating_system: :os})
end
defp breakdown_events(_, _, _, [], _), do: []
- defp breakdown_events(site, query, property, metrics, {limit, page}) do
- offset = (page - 1) * limit
-
+ defp breakdown_events(site, query, property, metrics, pagination) do
from(e in base_event_query(site, query),
order_by: [desc: fragment("uniq(?)", e.user_id)],
- limit: ^limit,
- offset: ^offset,
select: %{}
)
|> do_group_by(property)
|> select_event_metrics(metrics)
+ |> merge_imported(site, query, property, metrics)
+ |> apply_pagination(pagination)
|> ClickhouseRepo.all()
+ |> transform_keys(%{operating_system: :os})
end
defp breakdown_time_on_page(_site, _query, []) do
@@ -229,10 +244,17 @@ defmodule Plausible.Stats.Breakdown do
{base_query_raw, base_query_raw_params} = ClickhouseRepo.to_sql(:all, q)
+ select =
+ if query.include_imported do
+ "sum(td), count(case when p2 != p then 1 end)"
+ else
+ "round(sum(td)/count(case when p2 != p then 1 end))"
+ end
+
time_query = "
SELECT
p,
- round(sum(td)/count(case when p2 != p then 1 end)) as avgTime
+ #{select}
FROM
(SELECT
p,
@@ -250,7 +272,39 @@ defmodule Plausible.Stats.Breakdown do
GROUP BY p"
{:ok, res} = ClickhouseRepo.query(time_query, base_query_raw_params ++ [pages])
- res.rows |> Enum.map(fn [page, time] -> {page, time} end) |> Enum.into(%{})
+
+ if query.include_imported do
+ # Imported page views have pre-calculated values
+ res =
+ res.rows
+ |> Enum.map(fn [page, time, visits] -> {page, {time, visits}} end)
+ |> Enum.into(%{})
+
+ from(
+ i in "imported_pages",
+ group_by: i.page,
+ where: i.site_id == ^site.id,
+ where: i.date >= ^query.date_range.first and i.date <= ^query.date_range.last,
+ where: i.page in ^pages,
+ select: %{
+ page: i.page,
+ pageviews: fragment("sum(?) - sum(?)", i.pageviews, i.exits),
+ time_on_page: sum(i.time_on_page)
+ }
+ )
+ |> ClickhouseRepo.all()
+ |> Enum.reduce(res, fn %{page: page, pageviews: pageviews, time_on_page: time}, res ->
+ {restime, resviews} = Map.get(res, page, {0, 0})
+ Map.put(res, page, {restime + time, resviews + pageviews})
+ end)
+ |> Enum.map(fn
+ {page, {_, 0}} -> {page, nil}
+ {page, {time, pageviews}} -> {page, time / pageviews}
+ end)
+ |> Enum.into(%{})
+ else
+ res.rows |> Enum.map(fn [page, time] -> {page, time} end) |> Enum.into(%{})
+ end
end
defp do_group_by(
@@ -289,7 +343,7 @@ defmodule Plausible.Stats.Breakdown do
from(
e in q,
group_by: e.name,
- select_merge: %{"name" => e.name}
+ select_merge: %{name: e.name}
)
end
@@ -300,7 +354,7 @@ defmodule Plausible.Stats.Breakdown do
from(
e in q,
group_by: e.pathname,
- select_merge: %{"page" => e.pathname}
+ select_merge: %{page: e.pathname}
)
end
@@ -314,8 +368,8 @@ defmodule Plausible.Stats.Breakdown do
e in q,
group_by: fragment("index"),
select_merge: %{
- "index" => fragment("arrayJoin(indices) as index"),
- "page_match" => fragment("array(?)[index]", ^match_exprs)
+ index: fragment("arrayJoin(indices) as index"),
+ page_match: fragment("array(?)[index]", ^match_exprs)
}
)
end
@@ -326,7 +380,7 @@ defmodule Plausible.Stats.Breakdown do
s in q,
group_by: s.referrer_source,
select_merge: %{
- "source" => fragment("if(empty(?), ?, ?)", s.referrer_source, @no_ref, s.referrer_source)
+ source: fragment("if(empty(?), ?, ?)", s.referrer_source, @no_ref, s.referrer_source)
}
)
end
@@ -335,8 +389,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.country_code,
- where: s.country_code != "\0\0",
- select_merge: %{"country" => s.country_code}
+ select_merge: %{country: s.country_code}
)
end
@@ -344,8 +397,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.subdivision1_code,
- where: s.subdivision1_code != "",
- select_merge: %{"region" => s.subdivision1_code}
+ select_merge: %{region: s.subdivision1_code}
)
end
@@ -353,8 +405,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.city_geoname_id,
- where: s.city_geoname_id != 0,
- select_merge: %{"city" => s.city_geoname_id}
+ select_merge: %{city: s.city_geoname_id}
)
end
@@ -362,7 +413,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.entry_page,
- select_merge: %{"entry_page" => s.entry_page}
+ select_merge: %{entry_page: s.entry_page}
)
end
@@ -370,7 +421,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.exit_page,
- select_merge: %{"exit_page" => s.exit_page}
+ select_merge: %{exit_page: s.exit_page}
)
end
@@ -379,7 +430,7 @@ defmodule Plausible.Stats.Breakdown do
s in q,
group_by: s.referrer,
select_merge: %{
- "referrer" => fragment("if(empty(?), ?, ?)", s.referrer, @no_ref, s.referrer)
+ referrer: fragment("if(empty(?), ?, ?)", s.referrer, @no_ref, s.referrer)
}
)
end
@@ -389,7 +440,7 @@ defmodule Plausible.Stats.Breakdown do
s in q,
group_by: s.utm_medium,
select_merge: %{
- "utm_medium" => fragment("if(empty(?), ?, ?)", s.utm_medium, @no_ref, s.utm_medium)
+ utm_medium: fragment("if(empty(?), ?, ?)", s.utm_medium, @no_ref, s.utm_medium)
}
)
end
@@ -399,7 +450,7 @@ defmodule Plausible.Stats.Breakdown do
s in q,
group_by: s.utm_source,
select_merge: %{
- "utm_source" => fragment("if(empty(?), ?, ?)", s.utm_source, @no_ref, s.utm_source)
+ utm_source: fragment("if(empty(?), ?, ?)", s.utm_source, @no_ref, s.utm_source)
}
)
end
@@ -409,7 +460,7 @@ defmodule Plausible.Stats.Breakdown do
s in q,
group_by: s.utm_campaign,
select_merge: %{
- "utm_campaign" => fragment("if(empty(?), ?, ?)", s.utm_campaign, @no_ref, s.utm_campaign)
+ utm_campaign: fragment("if(empty(?), ?, ?)", s.utm_campaign, @no_ref, s.utm_campaign)
}
)
end
@@ -419,7 +470,7 @@ defmodule Plausible.Stats.Breakdown do
s in q,
group_by: s.utm_content,
select_merge: %{
- "utm_content" => fragment("if(empty(?), ?, ?)", s.utm_content, @no_ref, s.utm_content)
+ utm_content: fragment("if(empty(?), ?, ?)", s.utm_content, @no_ref, s.utm_content)
}
)
end
@@ -429,7 +480,7 @@ defmodule Plausible.Stats.Breakdown do
s in q,
group_by: s.utm_term,
select_merge: %{
- "utm_term" => fragment("if(empty(?), ?, ?)", s.utm_term, @no_ref, s.utm_term)
+ utm_term: fragment("if(empty(?), ?, ?)", s.utm_term, @no_ref, s.utm_term)
}
)
end
@@ -438,7 +489,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.screen_size,
- select_merge: %{"device" => s.screen_size}
+ select_merge: %{device: s.screen_size}
)
end
@@ -446,7 +497,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.operating_system,
- select_merge: %{"os" => s.operating_system}
+ select_merge: %{operating_system: s.operating_system}
)
end
@@ -454,7 +505,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.operating_system_version,
- select_merge: %{"os_version" => s.operating_system_version}
+ select_merge: %{os_version: s.operating_system_version}
)
end
@@ -462,7 +513,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.browser,
- select_merge: %{"browser" => s.browser}
+ select_merge: %{browser: s.browser}
)
end
@@ -470,7 +521,7 @@ defmodule Plausible.Stats.Breakdown do
from(
s in q,
group_by: s.browser_version,
- select_merge: %{"browser_version" => s.browser_version}
+ select_merge: %{browser_version: s.browser_version}
)
end
@@ -482,4 +533,12 @@ defmodule Plausible.Stats.Breakdown do
|> Enum.into(%{})
end)
end
+
+ defp apply_pagination(q, {limit, page}) do
+ offset = (page - 1) * limit
+
+ q
+ |> Ecto.Query.limit(^limit)
+ |> Ecto.Query.offset(^offset)
+ end
end
diff --git a/lib/plausible/stats/clickhouse.ex b/lib/plausible/stats/clickhouse.ex
index 5fbb89255b8a..76dd9eb700e3 100644
--- a/lib/plausible/stats/clickhouse.ex
+++ b/lib/plausible/stats/clickhouse.ex
@@ -98,7 +98,7 @@ defmodule Plausible.Stats.Clickhouse do
defp filter_converted_sessions(db_query, site, query) do
goal = query.filters["goal"]
- page = query.filters["page"]
+ page = query.filters[:page]
if is_binary(goal) || is_binary(page) do
converted_sessions =
@@ -116,7 +116,7 @@ defmodule Plausible.Stats.Clickhouse do
end
defp apply_page_as_entry_page(db_query, _site, query) do
- include_path_filter_entry(db_query, query.filters["page"])
+ include_path_filter_entry(db_query, query.filters[:page])
end
def current_visitors(site, query) do
@@ -382,7 +382,7 @@ defmodule Plausible.Stats.Clickhouse do
q
end
- q = include_path_filter(q, query.filters["page"])
+ q = include_path_filter(q, query.filters[:page])
if query.filters["props"] do
[{key, val}] = query.filters["props"] |> Enum.into([])
@@ -560,4 +560,12 @@ defmodule Plausible.Stats.Clickhouse do
db_query
end
end
+
+ def pageviews_begin(site) do
+ ClickhouseRepo.one(
+ from e in "events",
+ where: e.domain == ^site.domain and e.name == "pageview",
+ select: min(e.timestamp)
+ )
+ end
end
diff --git a/lib/plausible/stats/compare.ex b/lib/plausible/stats/compare.ex
index 2e98d908c586..99c6fc0bd976 100644
--- a/lib/plausible/stats/compare.ex
+++ b/lib/plausible/stats/compare.ex
@@ -1,14 +1,14 @@
defmodule Plausible.Stats.Compare do
- def calculate_change("bounce_rate", old_stats, new_stats) do
- old_count = old_stats["bounce_rate"]["value"]
- new_count = new_stats["bounce_rate"]["value"]
+ def calculate_change(:bounce_rate, old_stats, new_stats) do
+ old_count = old_stats[:bounce_rate][:value]
+ new_count = new_stats[:bounce_rate][:value]
if old_count > 0, do: new_count - old_count
end
def calculate_change(metric, old_stats, new_stats) do
- old_count = old_stats[metric]["value"]
- new_count = new_stats[metric]["value"]
+ old_count = old_stats[metric][:value]
+ new_count = new_stats[metric][:value]
percent_change(old_count, new_count)
end
diff --git a/lib/plausible/stats/imported.ex b/lib/plausible/stats/imported.ex
new file mode 100644
index 000000000000..f39e8a27be57
--- /dev/null
+++ b/lib/plausible/stats/imported.ex
@@ -0,0 +1,434 @@
+defmodule Plausible.Stats.Imported do
+ use Plausible.ClickhouseRepo
+ alias Plausible.Stats.Query
+ import Ecto.Query
+
+ @no_ref "Direct / None"
+
+ def merge_imported_timeseries(native_q, _, %Plausible.Stats.Query{include_imported: false}, _),
+ do: native_q
+
+ def merge_imported_timeseries(
+ native_q,
+ site,
+ query,
+ metrics
+ ) do
+ imported_q =
+ from(v in "imported_visitors",
+ where: v.site_id == ^site.id,
+ where: v.date >= ^query.date_range.first and v.date <= ^query.date_range.last,
+ select: %{visitors: sum(v.visitors)}
+ )
+ |> apply_interval(query)
+
+ from(s in Ecto.Query.subquery(native_q),
+ full_join: i in subquery(imported_q),
+ on: field(s, :date) == field(i, :date)
+ )
+ |> select_joined_metrics(metrics)
+ end
+
+ defp apply_interval(imported_q, %Plausible.Stats.Query{interval: "month"}) do
+ imported_q
+ |> group_by([i], fragment("toStartOfMonth(?)", i.date))
+ |> select_merge([i], %{date: fragment("toStartOfMonth(?)", i.date)})
+ end
+
+ defp apply_interval(imported_q, _query) do
+ imported_q
+ |> group_by([i], i.date)
+ |> select_merge([i], %{date: i.date})
+ end
+
+ def merge_imported(q, _, %Query{include_imported: false}, _, _), do: q
+ def merge_imported(q, _, _, _, [:events | _]), do: q
+ # GA only has 'source'
+  def merge_imported(q, _, _, "visit:utm_source", _), do: q
+
+ def merge_imported(q, site, query, property, metrics)
+ when property in [
+ "visit:source",
+ "visit:utm_medium",
+ "visit:utm_campaign",
+ "visit:utm_term",
+ "visit:utm_content",
+ "visit:entry_page",
+ "visit:exit_page",
+ "visit:country",
+ "visit:region",
+ "visit:city",
+ "visit:device",
+ "visit:browser",
+ "visit:os",
+ "event:page"
+ ] do
+ {table, dim} =
+ case property do
+ "visit:country" ->
+ {"imported_locations", :country}
+
+ "visit:region" ->
+ {"imported_locations", :region}
+
+ "visit:city" ->
+ {"imported_locations", :city}
+
+ "visit:utm_medium" ->
+ {"imported_sources", :utm_medium}
+
+ "visit:utm_campaign" ->
+ {"imported_sources", :utm_campaign}
+
+ "visit:utm_term" ->
+ {"imported_sources", :utm_term}
+
+ "visit:utm_content" ->
+ {"imported_sources", :utm_content}
+
+ "visit:os" ->
+ {"imported_operating_systems", :operating_system}
+
+ "event:page" ->
+ {"imported_pages", :page}
+
+ _ ->
+ dim = String.trim_leading(property, "visit:")
+ {"imported_#{dim}s", String.to_existing_atom(dim)}
+ end
+
+ imported_q =
+ from(
+ i in table,
+ group_by: field(i, ^dim),
+ where: i.site_id == ^site.id,
+ where: i.date >= ^query.date_range.first and i.date <= ^query.date_range.last,
+ select: %{}
+ )
+ |> select_imported_metrics(metrics)
+
+ imported_q =
+ case query.filters[property] do
+ {:is_not, value} ->
+ value = if value == @no_ref, do: "", else: value
+ where(imported_q, [i], field(i, ^dim) != ^value)
+
+ {:member, list} ->
+ where(imported_q, [i], field(i, ^dim) in ^list)
+
+ _ ->
+ imported_q
+ end
+
+ imported_q =
+ case dim do
+ :source ->
+ imported_q
+ |> select_merge([i], %{
+ source: fragment("if(empty(?), ?, ?)", i.source, @no_ref, i.source)
+ })
+
+ :utm_medium ->
+ imported_q
+ |> select_merge([i], %{
+ utm_medium: fragment("if(empty(?), ?, ?)", i.utm_medium, @no_ref, i.utm_medium)
+ })
+
+ :utm_source ->
+ imported_q
+ |> select_merge([i], %{
+ utm_source: fragment("if(empty(?), ?, ?)", i.utm_source, @no_ref, i.utm_source)
+ })
+
+ :utm_campaign ->
+ imported_q
+ |> select_merge([i], %{
+ utm_campaign: fragment("if(empty(?), ?, ?)", i.utm_campaign, @no_ref, i.utm_campaign)
+ })
+
+ :utm_term ->
+ imported_q
+ |> select_merge([i], %{
+ utm_term: fragment("if(empty(?), ?, ?)", i.utm_term, @no_ref, i.utm_term)
+ })
+
+ :utm_content ->
+ imported_q
+ |> select_merge([i], %{
+ utm_content: fragment("if(empty(?), ?, ?)", i.utm_content, @no_ref, i.utm_content)
+ })
+
+ :page ->
+ imported_q
+ |> select_merge([i], %{
+ page: i.page,
+ time_on_page: sum(i.time_on_page)
+ })
+
+ :entry_page ->
+ imported_q
+ |> select_merge([i], %{
+ entry_page: i.entry_page,
+ visits: sum(i.entrances)
+ })
+
+ :exit_page ->
+ imported_q
+ |> select_merge([i], %{exit_page: i.exit_page, visits: sum(i.exits)})
+
+ :country ->
+ imported_q |> select_merge([i], %{country: i.country})
+
+ :region ->
+ imported_q |> select_merge([i], %{region: i.region})
+
+ :city ->
+ imported_q |> select_merge([i], %{city: i.city})
+
+ :device ->
+ imported_q |> select_merge([i], %{device: i.device})
+
+ :browser ->
+ imported_q |> select_merge([i], %{browser: i.browser})
+
+ :operating_system ->
+ imported_q |> select_merge([i], %{operating_system: i.operating_system})
+ end
+
+ q =
+ from(s in Ecto.Query.subquery(q),
+ full_join: i in subquery(imported_q),
+ on: field(s, ^dim) == field(i, ^dim)
+ )
+ |> select_joined_metrics(metrics)
+ |> apply_order_by(metrics)
+
+ case dim do
+ :source ->
+ q
+ |> select_merge([s, i], %{
+ source: fragment("if(empty(?), ?, ?)", s.source, i.source, s.source)
+ })
+
+ :utm_medium ->
+ q
+ |> select_merge([s, i], %{
+ utm_medium: fragment("if(empty(?), ?, ?)", s.utm_medium, i.utm_medium, s.utm_medium)
+ })
+
+ :utm_source ->
+ q
+ |> select_merge([s, i], %{
+ utm_source: fragment("if(empty(?), ?, ?)", s.utm_source, i.utm_source, s.utm_source)
+ })
+
+ :utm_campaign ->
+ q
+ |> select_merge([s, i], %{
+ utm_campaign:
+ fragment("if(empty(?), ?, ?)", s.utm_campaign, i.utm_campaign, s.utm_campaign)
+ })
+
+ :utm_term ->
+ q
+ |> select_merge([s, i], %{
+ utm_term: fragment("if(empty(?), ?, ?)", s.utm_term, i.utm_term, s.utm_term)
+ })
+
+ :utm_content ->
+ q
+ |> select_merge([s, i], %{
+ utm_content: fragment("if(empty(?), ?, ?)", s.utm_content, i.utm_content, s.utm_content)
+ })
+
+ :page ->
+ q
+ |> select_merge([s, i], %{
+ page: fragment("if(empty(?), ?, ?)", i.page, s.page, i.page)
+ })
+
+ :entry_page ->
+ q
+ |> select_merge([s, i], %{
+ entry_page: fragment("if(empty(?), ?, ?)", i.entry_page, s.entry_page, i.entry_page),
+        visits: fragment("coalesce(?, 0) + coalesce(?, 0)", s.visits, i.visits)
+ })
+
+ :exit_page ->
+ q
+ |> select_merge([s, i], %{
+ exit_page: fragment("if(empty(?), ?, ?)", i.exit_page, s.exit_page, i.exit_page),
+ visits: fragment("coalesce(?, 0) + coalesce(?, 0)", s.visits, i.visits)
+ })
+
+ :country ->
+ q
+ |> select_merge([i, s], %{
+ country: fragment("if(empty(?), ?, ?)", s.country, i.country, s.country)
+ })
+
+ :region ->
+ q
+ |> select_merge([i, s], %{
+ region: fragment("if(empty(?), ?, ?)", s.region, i.region, s.region)
+ })
+
+ :city ->
+ q
+ |> select_merge([i, s], %{
+ city: fragment("coalesce(?, ?)", s.city, i.city)
+ })
+
+ :device ->
+ q
+ |> select_merge([i, s], %{
+ device: fragment("if(empty(?), ?, ?)", s.device, i.device, s.device)
+ })
+
+ :browser ->
+ q
+ |> select_merge([i, s], %{
+ browser: fragment("if(empty(?), ?, ?)", s.browser, i.browser, s.browser)
+ })
+
+ :operating_system ->
+ q
+ |> select_merge([i, s], %{
+ operating_system:
+ fragment(
+ "if(empty(?), ?, ?)",
+ s.operating_system,
+ i.operating_system,
+ s.operating_system
+ )
+ })
+ end
+ end
+
+ def merge_imported(q, site, query, :aggregate, metrics) do
+ imported_q =
+ from(
+ i in "imported_visitors",
+ where: i.site_id == ^site.id,
+ where: i.date >= ^query.date_range.first and i.date <= ^query.date_range.last,
+ select: %{}
+ )
+ |> select_imported_metrics(metrics)
+
+ from(
+ s in subquery(q),
+ cross_join: i in subquery(imported_q),
+ select: %{}
+ )
+ |> select_joined_metrics(metrics)
+ end
+
+ def merge_imported(q, _, _, _, _), do: q
+
+ defp select_imported_metrics(q, []), do: q
+
+ defp select_imported_metrics(q, [:visitors | rest]) do
+ q
+ |> select_merge([i], %{visitors: sum(i.visitors)})
+ |> select_imported_metrics(rest)
+ end
+
+ defp select_imported_metrics(q, [:pageviews | rest]) do
+ q
+ |> select_merge([i], %{pageviews: sum(i.pageviews)})
+ |> select_imported_metrics(rest)
+ end
+
+ defp select_imported_metrics(q, [:bounce_rate | rest]) do
+ q
+ |> select_merge([i], %{
+ bounces: sum(i.bounces),
+ visits: sum(i.visits)
+ })
+ |> select_imported_metrics(rest)
+ end
+
+ defp select_imported_metrics(q, [:visit_duration | rest]) do
+ q
+ |> select_merge([i], %{visit_duration: sum(i.visit_duration)})
+ |> select_imported_metrics(rest)
+ end
+
+ defp select_imported_metrics(q, [_ | rest]) do
+ q
+ |> select_imported_metrics(rest)
+ end
+
+ defp select_joined_metrics(q, []), do: q
+ # TODO: Reverse-engineering the native data bounces and total visit
+ # durations to combine with imported data is inefficient. Instead both
+ # queries should fetch bounces/total_visit_duration and visits and be
+ # used as subqueries to a main query that then find the bounce rate/avg
+ # visit_duration.
+
+ defp select_joined_metrics(q, [:visitors | rest]) do
+ q
+ |> select_merge([s, i], %{
+      visitors: fragment("coalesce(?, 0) + coalesce(?, 0)", s.visitors, i.visitors)
+ })
+ |> select_joined_metrics(rest)
+ end
+
+ defp select_joined_metrics(q, [:pageviews | rest]) do
+ q
+ |> select_merge([s, i], %{
+ pageviews: fragment("coalesce(?, 0) + coalesce(?, 0)", s.pageviews, i.pageviews)
+ })
+ |> select_joined_metrics(rest)
+ end
+
+ defp select_joined_metrics(q, [:bounce_rate | rest]) do
+ q
+ |> select_merge([s, i], %{
+ bounce_rate:
+ fragment(
+ "round(100 * (coalesce(?, 0) + coalesce((? * ? / 100), 0)) / (coalesce(?, 0) + coalesce(?, 0)))",
+ i.bounces,
+ s.bounce_rate,
+ s.visits,
+ i.visits,
+ s.visits
+ )
+ })
+ |> select_joined_metrics(rest)
+ end
+
+ defp select_joined_metrics(q, [:visit_duration | rest]) do
+ q
+ |> select_merge([s, i], %{
+      visit_duration:
+        fragment(
+          "(coalesce(?, 0) + coalesce(?, 0) * coalesce(?, 0)) / (coalesce(?, 0) + coalesce(?, 0))",
+          i.visit_duration,
+          s.visit_duration,
+          s.visits,
+          s.visits,
+          i.visits
+        )
+ |> select_joined_metrics(rest)
+ end
+
+ defp select_joined_metrics(q, [:sample_percent | rest]) do
+ q
+ |> select_merge([s, i], %{sample_percent: s.sample_percent})
+ |> select_joined_metrics(rest)
+ end
+
+ defp select_joined_metrics(q, [_ | rest]) do
+ q
+ |> select_joined_metrics(rest)
+ end
+
+ defp apply_order_by(q, [:visitors | rest]) do
+ order_by(q, [s, i], desc: fragment("coalesce(?, 0) + coalesce(?, 0)", s.visitors, i.visitors))
+ |> apply_order_by(rest)
+ end
+
+ defp apply_order_by(q, _), do: q
+end
diff --git a/lib/plausible/stats/query.ex b/lib/plausible/stats/query.ex
index 1342fd3970dd..525364f8f039 100644
--- a/lib/plausible/stats/query.ex
+++ b/lib/plausible/stats/query.ex
@@ -3,7 +3,8 @@ defmodule Plausible.Stats.Query do
interval: nil,
period: nil,
filters: %{},
- sample_threshold: 20_000_000
+ sample_threshold: 20_000_000,
+ include_imported: false
@default_sample_threshold 20_000_000
@@ -37,20 +38,21 @@ defmodule Plausible.Stats.Query do
Map.put(query, :date_range, Date.range(new_first, new_last))
end
- def from(tz, %{"period" => "realtime"} = params) do
- date = today(tz)
+ def from(site, %{"period" => "realtime"} = params) do
+ date = today(site.timezone)
%__MODULE__{
period: "realtime",
interval: "minute",
date_range: Date.range(date, date),
filters: parse_filters(params),
- sample_threshold: Map.get(params, "sample_threshold", @default_sample_threshold)
+ sample_threshold: Map.get(params, "sample_threshold", @default_sample_threshold),
+ include_imported: false
}
end
- def from(tz, %{"period" => "day"} = params) do
- date = parse_single_date(tz, params)
+ def from(site, %{"period" => "day"} = params) do
+ date = parse_single_date(site.timezone, params)
%__MODULE__{
period: "day",
@@ -59,10 +61,11 @@ defmodule Plausible.Stats.Query do
filters: parse_filters(params),
sample_threshold: Map.get(params, "sample_threshold", @default_sample_threshold)
}
+ |> maybe_include_imported(site, params)
end
- def from(tz, %{"period" => "7d"} = params) do
- end_date = parse_single_date(tz, params)
+ def from(site, %{"period" => "7d"} = params) do
+ end_date = parse_single_date(site.timezone, params)
start_date = end_date |> Timex.shift(days: -6)
%__MODULE__{
@@ -72,10 +75,11 @@ defmodule Plausible.Stats.Query do
filters: parse_filters(params),
sample_threshold: Map.get(params, "sample_threshold", @default_sample_threshold)
}
+ |> maybe_include_imported(site, params)
end
- def from(tz, %{"period" => "30d"} = params) do
- end_date = parse_single_date(tz, params)
+ def from(site, %{"period" => "30d"} = params) do
+ end_date = parse_single_date(site.timezone, params)
start_date = end_date |> Timex.shift(days: -30)
%__MODULE__{
@@ -85,10 +89,11 @@ defmodule Plausible.Stats.Query do
filters: parse_filters(params),
sample_threshold: Map.get(params, "sample_threshold", @default_sample_threshold)
}
+ |> maybe_include_imported(site, params)
end
- def from(tz, %{"period" => "month"} = params) do
- date = parse_single_date(tz, params)
+ def from(site, %{"period" => "month"} = params) do
+ date = parse_single_date(site.timezone, params)
start_date = Timex.beginning_of_month(date)
end_date = Timex.end_of_month(date)
@@ -100,11 +105,12 @@ defmodule Plausible.Stats.Query do
filters: parse_filters(params),
sample_threshold: Map.get(params, "sample_threshold", @default_sample_threshold)
}
+ |> maybe_include_imported(site, params)
end
- def from(tz, %{"period" => "6mo"} = params) do
+ def from(site, %{"period" => "6mo"} = params) do
end_date =
- parse_single_date(tz, params)
+ parse_single_date(site.timezone, params)
|> Timex.end_of_month()
start_date =
@@ -118,11 +124,12 @@ defmodule Plausible.Stats.Query do
filters: parse_filters(params),
sample_threshold: Map.get(params, "sample_threshold", @default_sample_threshold)
}
+ |> maybe_include_imported(site, params)
end
- def from(tz, %{"period" => "12mo"} = params) do
+ def from(site, %{"period" => "12mo"} = params) do
end_date =
- parse_single_date(tz, params)
+ parse_single_date(site.timezone, params)
|> Timex.end_of_month()
start_date =
@@ -136,19 +143,20 @@ defmodule Plausible.Stats.Query do
filters: parse_filters(params),
sample_threshold: Map.get(params, "sample_threshold", @default_sample_threshold)
}
+ |> maybe_include_imported(site, params)
end
- def from(tz, %{"period" => "custom", "from" => from, "to" => to} = params) do
+ def from(site, %{"period" => "custom", "from" => from, "to" => to} = params) do
new_params =
params
|> Map.delete("from")
|> Map.delete("to")
|> Map.put("date", Enum.join([from, to], ","))
- from(tz, new_params)
+ from(site, new_params)
end
- def from(_tz, %{"period" => "custom", "date" => date} = params) do
+ def from(site, %{"period" => "custom", "date" => date} = params) do
[from, to] = String.split(date, ",")
from_date = Date.from_iso8601!(String.trim(from))
to_date = Date.from_iso8601!(String.trim(to))
@@ -160,6 +168,7 @@ defmodule Plausible.Stats.Query do
filters: parse_filters(params),
sample_threshold: Map.get(params, "sample_threshold", @default_sample_threshold)
}
+ |> maybe_include_imported(site, params)
end
def from(tz, params) do
@@ -247,4 +256,19 @@ defmodule Plausible.Stats.Query do
defp parse_goal_filter("Visit " <> page), do: {:is, :page, page}
defp parse_goal_filter(event), do: {:is, :event, event}
+
+ defp maybe_include_imported(query, site, params) do
+ imported_data_requested = params["with_imported"] == "true"
+ has_imported_data = site.imported_data && site.imported_data.status == "ok"
+
+ date_range_overlaps =
+ has_imported_data && !Timex.after?(query.date_range.first, site.imported_data.end_date)
+
+ no_filters_applied = Enum.empty?(query.filters)
+
+ include_imported =
+ imported_data_requested && has_imported_data && date_range_overlaps && no_filters_applied
+
+ %{query | include_imported: !!include_imported}
+ end
end
diff --git a/lib/plausible/stats/timeseries.ex b/lib/plausible/stats/timeseries.ex
index 253598b03d57..694a7dd21ace 100644
--- a/lib/plausible/stats/timeseries.ex
+++ b/lib/plausible/stats/timeseries.ex
@@ -4,8 +4,8 @@ defmodule Plausible.Stats.Timeseries do
import Plausible.Stats.Base
use Plausible.Stats.Fragments
- @event_metrics ["visitors", "pageviews"]
- @session_metrics ["visits", "bounce_rate", "visit_duration"]
+ @event_metrics [:visitors, :pageviews]
+ @session_metrics [:visits, :bounce_rate, :visit_duration]
def timeseries(site, query, metrics) do
steps = buckets(query)
@@ -23,36 +23,34 @@ defmodule Plausible.Stats.Timeseries do
Enum.map(steps, fn step ->
empty_row(step, metrics)
- |> Map.merge(Enum.find(event_result, fn row -> row["date"] == step end) || %{})
- |> Map.merge(Enum.find(session_result, fn row -> row["date"] == step end) || %{})
+ |> Map.merge(Enum.find(event_result, fn row -> row[:date] == step end) || %{})
+ |> Map.merge(Enum.find(session_result, fn row -> row[:date] == step end) || %{})
end)
end
+ defp events_timeseries(_, _, []), do: []
+
defp events_timeseries(site, query, metrics) do
- from(e in base_event_query(site, query),
- group_by: fragment("date"),
- order_by: fragment("date"),
- select: %{}
- )
+ from(e in base_event_query(site, query), select: %{})
|> select_bucket(site, query)
|> select_event_metrics(metrics)
+ |> Plausible.Stats.Imported.merge_imported_timeseries(site, query, metrics)
|> ClickhouseRepo.all()
end
+ defp sessions_timeseries(_, _, []), do: []
+
defp sessions_timeseries(site, query, metrics) do
query = Query.treat_page_filter_as_entry_page(query)
- from(e in query_sessions(site, query),
- group_by: fragment("date"),
- order_by: fragment("date"),
- select: %{}
- )
+ from(e in query_sessions(site, query), select: %{})
|> select_bucket(site, query)
|> select_session_metrics(metrics)
+ |> Plausible.Stats.Imported.merge_imported_timeseries(site, query, metrics)
|> ClickhouseRepo.all()
end
- defp buckets(%Query{interval: "month"} = query) do
+ def buckets(%Query{interval: "month"} = query) do
n_buckets = Timex.diff(query.date_range.last, query.date_range.first, :months)
Enum.map(n_buckets..0, fn shift ->
@@ -62,11 +60,11 @@ defmodule Plausible.Stats.Timeseries do
end)
end
- defp buckets(%Query{interval: "date"} = query) do
+ def buckets(%Query{interval: "date"} = query) do
Enum.into(query.date_range, [])
end
- defp buckets(%Query{interval: "hour"} = query) do
+ def buckets(%Query{interval: "hour"} = query) do
Enum.map(0..23, fn step ->
Timex.to_datetime(query.date_range.first)
|> Timex.shift(hours: step)
@@ -74,55 +72,62 @@ defmodule Plausible.Stats.Timeseries do
end)
end
- defp buckets(%Query{period: "30m", interval: "minute"}) do
+ def buckets(%Query{period: "30m", interval: "minute"}) do
Enum.into(-30..-1, [])
end
- defp select_bucket(q, site, %Query{interval: "month"}) do
+ def select_bucket(q, site, %Query{interval: "month"}) do
from(
e in q,
+ group_by: fragment("toStartOfMonth(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
+ order_by: fragment("toStartOfMonth(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
select_merge: %{
- "date" =>
- fragment("toStartOfMonth(toTimeZone(?, ?)) as date", e.timestamp, ^site.timezone)
+ date: fragment("toStartOfMonth(toTimeZone(?, ?))", e.timestamp, ^site.timezone)
}
)
end
- defp select_bucket(q, site, %Query{interval: "date"}) do
+ def select_bucket(q, site, %Query{interval: "date"}) do
from(
e in q,
+ group_by: fragment("toDate(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
+ order_by: fragment("toDate(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
select_merge: %{
- "date" => fragment("toDate(toTimeZone(?, ?)) as date", e.timestamp, ^site.timezone)
+ date: fragment("toDate(toTimeZone(?, ?))", e.timestamp, ^site.timezone)
}
)
end
- defp select_bucket(q, site, %Query{interval: "hour"}) do
+ def select_bucket(q, site, %Query{interval: "hour"}) do
from(
e in q,
+ group_by: fragment("toStartOfHour(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
+ order_by: fragment("toStartOfHour(toTimeZone(?, ?))", e.timestamp, ^site.timezone),
select_merge: %{
- "date" => fragment("toStartOfHour(toTimeZone(?, ?)) as date", e.timestamp, ^site.timezone)
+ date: fragment("toStartOfHour(toTimeZone(?, ?))", e.timestamp, ^site.timezone)
}
)
end
- defp select_bucket(q, _site, %Query{interval: "minute"}) do
+ def select_bucket(q, _site, %Query{interval: "minute"}) do
from(
e in q,
+ group_by: fragment("dateDiff('minute', now(), ?)", e.timestamp),
+ order_by: fragment("dateDiff('minute', now(), ?)", e.timestamp),
select_merge: %{
- "date" => fragment("dateDiff('minute', now(), ?) as date", e.timestamp)
+ date: fragment("dateDiff('minute', now(), ?)", e.timestamp)
}
)
end
defp empty_row(date, metrics) do
- Enum.reduce(metrics, %{"date" => date}, fn metric, row ->
+ Enum.reduce(metrics, %{date: date}, fn metric, row ->
case metric do
- "pageviews" -> Map.merge(row, %{"pageviews" => 0})
- "visitors" -> Map.merge(row, %{"visitors" => 0})
- "visits" -> Map.merge(row, %{"visits" => 0})
- "bounce_rate" -> Map.merge(row, %{"bounce_rate" => nil})
- "visit_duration" -> Map.merge(row, %{"visit_duration" => nil})
+ :pageviews -> Map.merge(row, %{pageviews: 0})
+ :visitors -> Map.merge(row, %{visitors: 0})
+ :visits -> Map.merge(row, %{visits: 0})
+ :bounce_rate -> Map.merge(row, %{bounce_rate: nil})
+      :visit_duration -> Map.merge(row, %{visit_duration: nil})
end
end)
end
diff --git a/lib/plausible_web/controllers/api/external_controller.ex b/lib/plausible_web/controllers/api/external_controller.ex
index af30ec0f2784..66ab5132c071 100644
--- a/lib/plausible_web/controllers/api/external_controller.ex
+++ b/lib/plausible_web/controllers/api/external_controller.ex
@@ -505,7 +505,7 @@ defmodule PlausibleWeb.Api.ExternalController do
defp clean_referrer(ref) do
uri = URI.parse(ref.referer)
- if right_uri?(uri) do
+ if PlausibleWeb.RefInspector.right_uri?(uri) do
host = String.replace_prefix(uri.host, "www.", "")
path = uri.path || ""
host <> String.trim_trailing(path, "/")
@@ -584,37 +584,9 @@ defmodule PlausibleWeb.Api.ExternalController do
defp get_referrer_source(query, ref) do
source = query["utm_source"] || query["source"] || query["ref"]
- source || get_source_from_referrer(ref)
+ source || PlausibleWeb.RefInspector.parse(ref)
end
- defp get_source_from_referrer(nil), do: nil
-
- defp get_source_from_referrer(ref) do
- case ref.source do
- :unknown ->
- clean_uri(ref.referer)
-
- source ->
- source
- end
- end
-
- defp clean_uri(uri) do
- uri = URI.parse(String.trim(uri))
-
- if right_uri?(uri) do
- String.replace_leading(uri.host, "www.", "")
- end
- end
-
- defp right_uri?(%URI{host: nil}), do: false
-
- defp right_uri?(%URI{host: host, scheme: scheme})
- when scheme in ["http", "https"] and byte_size(host) > 0,
- do: true
-
- defp right_uri?(_), do: false
-
defp decode_query_params(nil), do: nil
defp decode_query_params(%URI{query: nil}), do: nil
diff --git a/lib/plausible_web/controllers/api/external_stats_controller.ex b/lib/plausible_web/controllers/api/external_stats_controller.ex
index 3631785ddefe..554a1e50919e 100644
--- a/lib/plausible_web/controllers/api/external_stats_controller.ex
+++ b/lib/plausible_web/controllers/api/external_stats_controller.ex
@@ -6,7 +6,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
def realtime_visitors(conn, _params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, %{"period" => "realtime"})
+ query = Query.from(site, %{"period" => "realtime"})
json(conn, Plausible.Stats.Clickhouse.current_visitors(site, query))
end
@@ -16,7 +16,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
with :ok <- validate_period(params),
:ok <- validate_date(params),
- query <- Query.from(site.timezone, params),
+ query <- Query.from(site, params),
{:ok, metrics} <- parse_metrics(params, nil, query) do
results =
if params["compare"] == "previous_period" do
@@ -31,13 +31,13 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
10_000
)
- Enum.map(curr_result, fn {metric, %{"value" => current_val}} ->
- %{"value" => prev_val} = prev_result[metric]
+ Enum.map(curr_result, fn {metric, %{value: current_val}} ->
+ %{value: prev_val} = prev_result[metric]
{metric,
%{
- "value" => current_val,
- "change" => percent_change(prev_val, current_val)
+ value: current_val,
+ change: percent_change(prev_val, current_val)
}}
end)
|> Enum.into(%{})
@@ -45,7 +45,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
Plausible.Stats.aggregate(site, query, metrics)
end
- json(conn, %{"results" => results})
+ json(conn, %{results: Map.take(results, metrics)})
else
{:error, msg} ->
conn
@@ -61,7 +61,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
with :ok <- validate_period(params),
:ok <- validate_date(params),
{:ok, property} <- validate_property(params),
- query <- Query.from(site.timezone, params),
+ query <- Query.from(site, params),
{:ok, metrics} <- parse_metrics(params, property, query) do
limit = String.to_integer(Map.get(params, "limit", "100"))
page = String.to_integer(Map.get(params, "page", "1"))
@@ -72,13 +72,13 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
prop_names = Props.props(site, query)
Enum.map(results, fn row ->
- Map.put(row, "props", prop_names[row["goal"]] || [])
+ Map.put(row, "props", prop_names[row[:goal]] || [])
end)
else
results
end
- json(conn, %{"results" => results})
+ json(conn, %{results: results})
else
{:error, msg} ->
conn
@@ -133,7 +133,7 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
"The metric `#{invalid_metric}` is not recognized. Find valid metrics from the documentation: https://plausible.io/docs/stats-api#get-apiv1statsbreakdown"}
end
else
- {:ok, metrics}
+ {:ok, Enum.map(metrics, &String.to_atom/1)}
end
end
@@ -144,10 +144,11 @@ defmodule PlausibleWeb.Api.ExternalStatsController do
with :ok <- validate_period(params),
:ok <- validate_date(params),
:ok <- validate_interval(params),
- query <- Query.from(site.timezone, params),
+ query <- Query.from(site, params),
{:ok, metrics} <- parse_metrics(params, nil, query) do
graph = Plausible.Stats.timeseries(site, query, metrics)
- json(conn, %{"results" => graph})
+ metrics = metrics ++ [:date]
+ json(conn, %{results: Enum.map(graph, &Map.take(&1, metrics))})
else
{:error, msg} ->
conn
diff --git a/lib/plausible_web/controllers/api/stats_controller.ex b/lib/plausible_web/controllers/api/stats_controller.ex
index 85940990db9c..943aa98e331b 100644
--- a/lib/plausible_web/controllers/api/stats_controller.ex
+++ b/lib/plausible_web/controllers/api/stats_controller.ex
@@ -7,7 +7,7 @@ defmodule PlausibleWeb.Api.StatsController do
def main_graph(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
timeseries_query =
if query.period == "realtime" do
@@ -16,12 +16,12 @@ defmodule PlausibleWeb.Api.StatsController do
query
end
- timeseries = Task.async(fn -> Stats.timeseries(site, timeseries_query, ["visitors"]) end)
+ timeseries = Task.async(fn -> Stats.timeseries(site, timeseries_query, [:visitors]) end)
{top_stats, sample_percent} = fetch_top_stats(site, query)
timeseries_result = Task.await(timeseries)
- plot = Enum.map(timeseries_result, fn row -> row["visitors"] end)
- labels = Enum.map(timeseries_result, fn row -> row["date"] end)
+ plot = Enum.map(timeseries_result, fn row -> row[:visitors] end)
+ labels = Enum.map(timeseries_result, fn row -> row[:date] end)
present_index = present_index_for(site, query, labels)
json(conn, %{
@@ -30,7 +30,9 @@ defmodule PlausibleWeb.Api.StatsController do
present_index: present_index,
top_stats: top_stats,
interval: query.interval,
- sample_percent: sample_percent
+ sample_percent: sample_percent,
+ with_imported: query.include_imported,
+ imported_source: site.imported_data && site.imported_data.source
})
end
@@ -67,9 +69,9 @@ defmodule PlausibleWeb.Api.StatsController do
query_30m = %Query{query | period: "30m"}
%{
- "visitors" => %{"value" => visitors},
- "pageviews" => %{"value" => pageviews}
- } = Stats.aggregate(site, query_30m, ["visitors", "pageviews"])
+ visitors: %{value: visitors},
+ pageviews: %{value: pageviews}
+ } = Stats.aggregate(site, query_30m, [:visitors, :pageviews])
stats = [
%{
@@ -95,22 +97,22 @@ defmodule PlausibleWeb.Api.StatsController do
prev_total_query = Query.shift_back(total_q, site)
%{
- "visitors" => %{"value" => unique_visitors}
- } = Stats.aggregate(site, total_q, ["visitors"])
+ visitors: %{value: unique_visitors}
+ } = Stats.aggregate(site, total_q, [:visitors])
%{
- "visitors" => %{"value" => prev_unique_visitors}
- } = Stats.aggregate(site, prev_total_query, ["visitors"])
+ visitors: %{value: prev_unique_visitors}
+ } = Stats.aggregate(site, prev_total_query, [:visitors])
%{
- "visitors" => %{"value" => converted_visitors},
- "events" => %{"value" => completions}
- } = Stats.aggregate(site, query, ["visitors", "events"])
+ visitors: %{value: converted_visitors},
+ events: %{value: completions}
+ } = Stats.aggregate(site, query, [:visitors, :events])
%{
- "visitors" => %{"value" => prev_converted_visitors},
- "events" => %{"value" => prev_completions}
- } = Stats.aggregate(site, prev_query, ["visitors", "events"])
+ visitors: %{value: prev_converted_visitors},
+ events: %{value: prev_completions}
+ } = Stats.aggregate(site, prev_query, [:visitors, :events])
conversion_rate = calculate_cr(unique_visitors, converted_visitors)
prev_conversion_rate = calculate_cr(prev_unique_visitors, prev_converted_visitors)
@@ -146,9 +148,9 @@ defmodule PlausibleWeb.Api.StatsController do
metrics =
if query.filters["event:page"] do
- ["visitors", "pageviews", "bounce_rate", "time_on_page", "sample_percent"]
+ [:visitors, :pageviews, :bounce_rate, :time_on_page, :sample_percent]
else
- ["visitors", "pageviews", "bounce_rate", "visit_duration", "sample_percent"]
+ [:visitors, :pageviews, :bounce_rate, :visit_duration, :sample_percent]
end
current_results = Stats.aggregate(site, query, metrics)
@@ -156,28 +158,28 @@ defmodule PlausibleWeb.Api.StatsController do
stats =
[
- top_stats_entry(current_results, prev_results, "Unique visitors", "visitors"),
- top_stats_entry(current_results, prev_results, "Total pageviews", "pageviews"),
- top_stats_entry(current_results, prev_results, "Bounce rate", "bounce_rate"),
- top_stats_entry(current_results, prev_results, "Visit duration", "visit_duration"),
- top_stats_entry(current_results, prev_results, "Time on page", "time_on_page")
+ top_stats_entry(current_results, prev_results, "Unique visitors", :visitors),
+ top_stats_entry(current_results, prev_results, "Total pageviews", :pageviews),
+ top_stats_entry(current_results, prev_results, "Bounce rate", :bounce_rate),
+ top_stats_entry(current_results, prev_results, "Visit duration", :visit_duration),
+ top_stats_entry(current_results, prev_results, "Time on page", :time_on_page)
]
|> Enum.filter(& &1)
- {stats, current_results["sample_percent"]["value"]}
+ {stats, current_results[:sample_percent][:value]}
end
defp top_stats_entry(current_results, prev_results, name, key) do
if current_results[key] do
%{
name: name,
- value: current_results[key]["value"],
- change: calculate_change(key, prev_results[key]["value"], current_results[key]["value"])
+ value: current_results[key][:value],
+ change: calculate_change(key, prev_results[key][:value], current_results[key][:value])
}
end
end
- defp calculate_change("bounce_rate", old_count, new_count) do
+ defp calculate_change(:bounce_rate, old_count, new_count) do
if old_count > 0, do: new_count - old_count
end
@@ -202,27 +204,27 @@ defmodule PlausibleWeb.Api.StatsController do
site = conn.assigns[:site]
query =
- Query.from(site.timezone, params)
+ Query.from(site, params)
|> Filters.add_prefix()
|> maybe_hide_noref("visit:source", params)
pagination = parse_pagination(params)
metrics =
- if params["detailed"], do: ["visitors", "bounce_rate", "visit_duration"], else: ["visitors"]
+ if params["detailed"], do: [:visitors, :bounce_rate, :visit_duration], else: [:visitors]
res =
Stats.breakdown(site, query, "visit:source", metrics, pagination)
- |> maybe_add_cr(site, query, pagination, "source", "visit:source")
- |> transform_keys(%{"source" => "name"})
+ |> maybe_add_cr(site, query, pagination, :source, "visit:source")
+ |> transform_keys(%{source: :name})
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
res
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- res |> to_csv(["name", "visitors", "bounce_rate", "visit_duration"])
+ res |> to_csv([:name, :visitors, :bounce_rate, :visit_duration])
end
else
json(conn, res)
@@ -233,25 +235,26 @@ defmodule PlausibleWeb.Api.StatsController do
site = conn.assigns[:site]
query =
- Query.from(site.timezone, params)
+ Query.from(site, params)
|> Filters.add_prefix()
|> maybe_hide_noref("visit:utm_medium", params)
pagination = parse_pagination(params)
- metrics = ["visitors", "bounce_rate", "visit_duration"]
+
+ metrics = [:visitors, :bounce_rate, :visit_duration]
res =
Stats.breakdown(site, query, "visit:utm_medium", metrics, pagination)
- |> maybe_add_cr(site, query, pagination, "utm_medium", "visit:utm_medium")
- |> transform_keys(%{"utm_medium" => "name"})
+ |> maybe_add_cr(site, query, pagination, :utm_medium, "visit:utm_medium")
+ |> transform_keys(%{utm_medium: :name})
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
res
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- res |> to_csv(["name", "visitors", "bounce_rate", "visit_duration"])
+ res |> to_csv([:name, :visitors, :bounce_rate, :visit_duration])
end
else
json(conn, res)
@@ -262,25 +265,26 @@ defmodule PlausibleWeb.Api.StatsController do
site = conn.assigns[:site]
query =
- Query.from(site.timezone, params)
+ Query.from(site, params)
|> Filters.add_prefix()
|> maybe_hide_noref("visit:utm_campaign", params)
pagination = parse_pagination(params)
- metrics = ["visitors", "bounce_rate", "visit_duration"]
+
+ metrics = [:visitors, :bounce_rate, :visit_duration]
res =
Stats.breakdown(site, query, "visit:utm_campaign", metrics, pagination)
- |> maybe_add_cr(site, query, pagination, "utm_campaign", "visit:utm_campaign")
- |> transform_keys(%{"utm_campaign" => "name"})
+ |> maybe_add_cr(site, query, pagination, :utm_campaign, "visit:utm_campaign")
+ |> transform_keys(%{utm_campaign: :name})
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
res
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- res |> to_csv(["name", "visitors", "bounce_rate", "visit_duration"])
+ res |> to_csv([:name, :visitors, :bounce_rate, :visit_duration])
end
else
json(conn, res)
@@ -291,25 +295,25 @@ defmodule PlausibleWeb.Api.StatsController do
site = conn.assigns[:site]
query =
- Query.from(site.timezone, params)
+ Query.from(site, params)
|> Filters.add_prefix()
|> maybe_hide_noref("visit:utm_content", params)
pagination = parse_pagination(params)
- metrics = ["visitors", "bounce_rate", "visit_duration"]
+ metrics = [:visitors, :bounce_rate, :visit_duration]
res =
Stats.breakdown(site, query, "visit:utm_content", metrics, pagination)
- |> maybe_add_cr(site, query, pagination, "utm_content", "visit:utm_content")
- |> transform_keys(%{"utm_content" => "name"})
+ |> maybe_add_cr(site, query, pagination, :utm_content, "visit:utm_content")
+ |> transform_keys(%{utm_content: :name})
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
res
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- res |> to_csv(["name", "visitors", "bounce_rate", "visit_duration"])
+ res |> to_csv([:name, :visitors, :bounce_rate, :visit_duration])
end
else
json(conn, res)
@@ -320,25 +324,25 @@ defmodule PlausibleWeb.Api.StatsController do
site = conn.assigns[:site]
query =
- Query.from(site.timezone, params)
+ Query.from(site, params)
|> Filters.add_prefix()
|> maybe_hide_noref("visit:utm_term", params)
pagination = parse_pagination(params)
- metrics = ["visitors", "bounce_rate", "visit_duration"]
+ metrics = [:visitors, :bounce_rate, :visit_duration]
res =
Stats.breakdown(site, query, "visit:utm_term", metrics, pagination)
- |> maybe_add_cr(site, query, pagination, "utm_term", "visit:utm_term")
- |> transform_keys(%{"utm_term" => "name"})
+ |> maybe_add_cr(site, query, pagination, :utm_term, "visit:utm_term")
+ |> transform_keys(%{utm_term: :name})
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
res
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- res |> to_csv(["name", "visitors", "bounce_rate", "visit_duration"])
+ res |> to_csv([:name, :visitors, :bounce_rate, :visit_duration])
end
else
json(conn, res)
@@ -349,25 +353,26 @@ defmodule PlausibleWeb.Api.StatsController do
site = conn.assigns[:site]
query =
- Query.from(site.timezone, params)
+ Query.from(site, params)
|> Filters.add_prefix()
|> maybe_hide_noref("visit:utm_source", params)
pagination = parse_pagination(params)
- metrics = ["visitors", "bounce_rate", "visit_duration"]
+
+ metrics = [:visitors, :bounce_rate, :visit_duration]
res =
Stats.breakdown(site, query, "visit:utm_source", metrics, pagination)
- |> maybe_add_cr(site, query, pagination, "utm_source", "visit:utm_source")
- |> transform_keys(%{"utm_source" => "name"})
+ |> maybe_add_cr(site, query, pagination, :utm_source, "visit:utm_source")
+ |> transform_keys(%{utm_source: :name})
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
res
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- res |> to_csv(["name", "visitors", "bounce_rate", "visit_duration"])
+ res |> to_csv([:name, :visitors, :bounce_rate, :visit_duration])
end
else
json(conn, res)
@@ -378,7 +383,7 @@ defmodule PlausibleWeb.Api.StatsController do
site = conn.assigns[:site] |> Repo.preload(:google_auth)
query =
- Query.from(site.timezone, params)
+ Query.from(site, params)
|> Query.put_filter("source", "Google")
|> Filters.add_prefix()
@@ -387,7 +392,7 @@ defmodule PlausibleWeb.Api.StatsController do
google_api().fetch_stats(site, query, params["limit"] || 9)
end
- %{"visitors" => %{"value" => total_visitors}} = Stats.aggregate(site, query, ["visitors"])
+ %{:visitors => %{value: total_visitors}} = Stats.aggregate(site, query, [:visitors])
case search_terms do
nil ->
@@ -408,47 +413,48 @@ defmodule PlausibleWeb.Api.StatsController do
site = conn.assigns[:site]
query =
- Query.from(site.timezone, params)
+ Query.from(site, params)
|> Query.put_filter("source", referrer)
|> Filters.add_prefix()
pagination = parse_pagination(params)
metrics =
- if params["detailed"], do: ["visitors", "bounce_rate", "visit_duration"], else: ["visitors"]
+ if params["detailed"], do: [:visitors, :bounce_rate, :visit_duration], else: [:visitors]
referrers =
Stats.breakdown(site, query, "visit:referrer", metrics, pagination)
- |> maybe_add_cr(site, query, pagination, "referrer", "visit:referrer")
- |> transform_keys(%{"referrer" => "name"})
+ |> maybe_add_cr(site, query, pagination, :referrer, "visit:referrer")
+ |> transform_keys(%{referrer: :name})
+ |> Enum.map(&Map.drop(&1, [:visits]))
- %{"visitors" => %{"value" => total_visitors}} = Stats.aggregate(site, query, ["visitors"])
+ %{:visitors => %{value: total_visitors}} = Stats.aggregate(site, query, [:visitors])
json(conn, %{referrers: referrers, total_visitors: total_visitors})
end
def pages(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
metrics =
if params["detailed"],
- do: ["visitors", "pageviews", "bounce_rate", "time_on_page"],
- else: ["visitors"]
+ do: [:visitors, :pageviews, :bounce_rate, :time_on_page],
+ else: [:visitors]
pagination = parse_pagination(params)
pages =
Stats.breakdown(site, query, "event:page", metrics, pagination)
- |> maybe_add_cr(site, query, pagination, "page", "event:page")
- |> transform_keys(%{"page" => "name"})
+ |> maybe_add_cr(site, query, pagination, :page, "event:page")
+ |> transform_keys(%{page: :name})
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
pages
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- pages |> to_csv(["name", "visitors", "bounce_rate", "time_on_page"])
+ pages |> to_csv([:name, :visitors, :bounce_rate, :time_on_page])
end
else
json(conn, pages)
@@ -457,26 +463,26 @@ defmodule PlausibleWeb.Api.StatsController do
def entry_pages(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
pagination = parse_pagination(params)
- metrics = ["visitors", "visits", "visit_duration"]
+ metrics = [:visitors, :visits, :visit_duration]
entry_pages =
Stats.breakdown(site, query, "visit:entry_page", metrics, pagination)
- |> maybe_add_cr(site, query, pagination, "entry_page", "visit:entry_page")
+ |> maybe_add_cr(site, query, pagination, :entry_page, "visit:entry_page")
|> transform_keys(%{
- "entry_page" => "name",
- "visitors" => "unique_entrances",
- "visits" => "total_entrances"
+ entry_page: :name,
+ visitors: :unique_entrances,
+ visits: :total_entrances
})
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
entry_pages
- |> transform_keys(%{"unique_entrances" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{unique_entrances: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- entry_pages |> to_csv(["name", "unique_entrances", "total_entrances", "visit_duration"])
+ entry_pages |> to_csv([:name, :unique_entrances, :total_entrances, :visit_duration])
end
else
json(conn, entry_pages)
@@ -485,20 +491,20 @@ defmodule PlausibleWeb.Api.StatsController do
def exit_pages(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
{limit, page} = parse_pagination(params)
- metrics = ["visitors", "visits"]
+ metrics = [:visitors, :visits]
exit_pages =
Stats.breakdown(site, query, "visit:exit_page", metrics, {limit, page})
- |> maybe_add_cr(site, query, {limit, page}, "exit_page", "visit:exit_page")
+ |> maybe_add_cr(site, query, {limit, page}, :exit_page, "visit:exit_page")
|> transform_keys(%{
- "exit_page" => "name",
- "visitors" => "unique_exits",
- "visits" => "total_exits"
+ exit_page: :name,
+ visitors: :unique_exits,
+ visits: :total_exits
})
- pages = Enum.map(exit_pages, & &1["name"])
+ pages = Enum.map(exit_pages, & &1[:name])
total_visits_query =
Query.put_filter(query, "event:page", {:member, pages})
@@ -508,29 +514,29 @@ defmodule PlausibleWeb.Api.StatsController do
|> Query.put_filter("visit:page", query.filters["event:page"])
total_pageviews =
- Stats.breakdown(site, total_visits_query, "event:page", ["pageviews"], {limit, 1})
+ Stats.breakdown(site, total_visits_query, "event:page", [:pageviews], {limit, 1})
exit_pages =
Enum.map(exit_pages, fn exit_page ->
exit_rate =
- case Enum.find(total_pageviews, &(&1["page"] == exit_page["name"])) do
- %{"pageviews" => pageviews} ->
- Float.floor(exit_page["total_exits"] / pageviews * 100)
+ case Enum.find(total_pageviews, &(&1[:page] == exit_page[:name])) do
+ %{pageviews: pageviews} ->
+ Float.floor(exit_page[:total_exits] / pageviews * 100)
nil ->
nil
end
- Map.put(exit_page, "exit_rate", exit_rate)
+ Map.put(exit_page, :exit_rate, exit_rate)
end)
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
exit_pages
- |> transform_keys(%{"unique_exits" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{unique_exits: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- exit_pages |> to_csv(["name", "unique_exits", "total_exits", "exit_rate"])
+ exit_pages |> to_csv([:name, :unique_exits, :total_exits, :exit_rate])
end
else
json(conn, exit_pages)
@@ -539,41 +545,55 @@ defmodule PlausibleWeb.Api.StatsController do
def countries(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+
+ query =
+ Query.from(site, params)
+ |> Filters.add_prefix()
+ |> Query.put_filter("visit:country", {:is_not, "\0\0"})
+
pagination = parse_pagination(params)
countries =
- Stats.breakdown(site, query, "visit:country", ["visitors"], pagination)
- |> maybe_add_cr(site, query, {300, 1}, "country", "visit:country")
- |> transform_keys(%{"country" => "code"})
+ Stats.breakdown(site, query, "visit:country", [:visitors], pagination)
+ |> maybe_add_cr(site, query, {300, 1}, :country, "visit:country")
+ |> transform_keys(%{country: :code})
|> maybe_add_percentages(query)
if params["csv"] do
countries =
countries
|> Enum.map(fn country ->
- country_info = get_country(country["code"])
- Map.put(country, "name", country_info.name)
+ country_info = get_country(country[:code])
+ Map.put(country, :name, country_info.name)
end)
if Map.has_key?(query.filters, "event:goal") do
countries
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- countries |> to_csv(["name", "visitors"])
+ countries |> to_csv([:name, :visitors])
end
else
countries =
Enum.map(countries, fn row ->
- country = get_country(row["code"])
-
- Map.merge(row, %{
- "name" => country.name,
- "flag" => country.flag,
- "alpha_3" => country.alpha_3,
- "code" => country.alpha_2
- })
+ country = get_country(row[:code])
+
+ if country do
+ Map.merge(row, %{
+ name: country.name,
+ flag: country.flag,
+ alpha_3: country.alpha_3,
+ code: country.alpha_2
+ })
+ else
+ Map.merge(row, %{
+ name: row[:code],
+ flag: "",
+ alpha_3: "",
+ code: ""
+ })
+ end
end)
json(conn, countries)
@@ -582,31 +602,36 @@ defmodule PlausibleWeb.Api.StatsController do
def regions(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+
+ query =
+ Query.from(site, params)
+ |> Filters.add_prefix()
+ |> Query.put_filter("visit:region", {:is_not, ""})
+
pagination = parse_pagination(params)
regions =
- Stats.breakdown(site, query, "visit:region", ["visitors"], pagination)
- |> transform_keys(%{"region" => "code"})
+ Stats.breakdown(site, query, "visit:region", [:visitors], pagination)
+ |> transform_keys(%{region: :code})
|> Enum.map(fn region ->
- region_entry = Location.get_subdivision(region["code"])
+ region_entry = Location.get_subdivision(region[:code])
if region_entry do
country_entry = get_country(region_entry.country_code)
- Map.merge(region, %{"name" => region_entry.name, "country_flag" => country_entry.flag})
+ Map.merge(region, %{name: region_entry.name, country_flag: country_entry.flag})
else
- Sentry.capture_message("Could not find region info", extra: %{code: region["code"]})
- Map.merge(region, %{"name" => region["code"]})
+ Sentry.capture_message("Could not find region info", extra: %{code: region[:code]})
+ Map.merge(region, %{name: region[:code]})
end
end)
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
regions
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- regions |> to_csv(["name", "visitors"])
+ regions |> to_csv([:name, :visitors])
end
else
json(conn, regions)
@@ -615,36 +640,41 @@ defmodule PlausibleWeb.Api.StatsController do
def cities(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+
+ query =
+ Query.from(site, params)
+ |> Filters.add_prefix()
+ |> Query.put_filter("visit:city", {:is_not, 0})
+
pagination = parse_pagination(params)
cities =
- Stats.breakdown(site, query, "visit:city", ["visitors"], pagination)
- |> transform_keys(%{"city" => "code"})
+ Stats.breakdown(site, query, "visit:city", [:visitors], pagination)
+ |> transform_keys(%{city: :code})
|> Enum.map(fn city ->
- city_info = Location.get_city(city["code"])
+ city_info = Location.get_city(city[:code])
if city_info do
country_info = get_country(city_info.country_code)
Map.merge(city, %{
- "name" => city_info.name,
- "country_flag" => country_info.flag
+ name: city_info.name,
+ country_flag: country_info.flag
})
else
- Sentry.capture_message("Could not find city info", extra: %{code: city["code"]})
+ Sentry.capture_message("Could not find city info", extra: %{code: city[:code]})
- Map.merge(city, %{"name" => "N/A"})
+ Map.merge(city, %{name: "N/A"})
end
end)
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
cities
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- cities |> to_csv(["name", "visitors"])
+ cities |> to_csv([:name, :visitors])
end
else
json(conn, cities)
@@ -653,22 +683,22 @@ defmodule PlausibleWeb.Api.StatsController do
def browsers(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
pagination = parse_pagination(params)
browsers =
- Stats.breakdown(site, query, "visit:browser", ["visitors"], pagination)
- |> maybe_add_cr(site, query, pagination, "browser", "visit:browser")
- |> transform_keys(%{"browser" => "name"})
+ Stats.breakdown(site, query, "visit:browser", [:visitors], pagination)
+ |> maybe_add_cr(site, query, pagination, :browser, "visit:browser")
+ |> transform_keys(%{browser: :name})
|> maybe_add_percentages(query)
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
browsers
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- browsers |> to_csv(["name", "visitors"])
+ browsers |> to_csv([:name, :visitors])
end
else
json(conn, browsers)
@@ -677,13 +707,13 @@ defmodule PlausibleWeb.Api.StatsController do
def browser_versions(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
pagination = parse_pagination(params)
versions =
- Stats.breakdown(site, query, "visit:browser_version", ["visitors"], pagination)
- |> maybe_add_cr(site, query, pagination, "browser_version", "visit:browser_version")
- |> transform_keys(%{"browser_version" => "name"})
+ Stats.breakdown(site, query, "visit:browser_version", [:visitors], pagination)
+ |> maybe_add_cr(site, query, pagination, :browser_version, "visit:browser_version")
+ |> transform_keys(%{browser_version: :name})
|> maybe_add_percentages(query)
json(conn, versions)
@@ -691,22 +721,22 @@ defmodule PlausibleWeb.Api.StatsController do
def operating_systems(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
pagination = parse_pagination(params)
systems =
- Stats.breakdown(site, query, "visit:os", ["visitors"], pagination)
- |> maybe_add_cr(site, query, pagination, "os", "visit:os")
- |> transform_keys(%{"os" => "name"})
+ Stats.breakdown(site, query, "visit:os", [:visitors], pagination)
+ |> maybe_add_cr(site, query, pagination, :os, "visit:os")
+ |> transform_keys(%{os: :name})
|> maybe_add_percentages(query)
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
systems
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- systems |> to_csv(["name", "visitors"])
+ systems |> to_csv([:name, :visitors])
end
else
json(conn, systems)
@@ -715,13 +745,13 @@ defmodule PlausibleWeb.Api.StatsController do
def operating_system_versions(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
pagination = parse_pagination(params)
versions =
- Stats.breakdown(site, query, "visit:os_version", ["visitors"], pagination)
- |> maybe_add_cr(site, query, pagination, "os_version", "visit:os_version")
- |> transform_keys(%{"os_version" => "name"})
+ Stats.breakdown(site, query, "visit:os_version", [:visitors], pagination)
+ |> maybe_add_cr(site, query, pagination, :os_version, "visit:os_version")
+ |> transform_keys(%{os_version: :name})
|> maybe_add_percentages(query)
json(conn, versions)
@@ -729,22 +759,22 @@ defmodule PlausibleWeb.Api.StatsController do
def screen_sizes(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
pagination = parse_pagination(params)
sizes =
- Stats.breakdown(site, query, "visit:device", ["visitors"], pagination)
- |> maybe_add_cr(site, query, pagination, "device", "visit:device")
- |> transform_keys(%{"device" => "name"})
+ Stats.breakdown(site, query, "visit:device", [:visitors], pagination)
+ |> maybe_add_cr(site, query, pagination, :device, "visit:device")
+ |> transform_keys(%{device: :name})
|> maybe_add_percentages(query)
if params["csv"] do
if Map.has_key?(query.filters, "event:goal") do
sizes
- |> transform_keys(%{"visitors" => "conversions"})
- |> to_csv(["name", "conversions", "conversion_rate"])
+ |> transform_keys(%{visitors: :conversions})
+ |> to_csv([:name, :conversions, :conversion_rate])
else
- sizes |> to_csv(["name", "visitors"])
+ sizes |> to_csv([:name, :visitors])
end
else
json(conn, sizes)
@@ -761,7 +791,7 @@ defmodule PlausibleWeb.Api.StatsController do
def conversions(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
query =
if query.period == "realtime" do
@@ -772,7 +802,7 @@ defmodule PlausibleWeb.Api.StatsController do
total_q = Query.remove_goal(query)
- %{"visitors" => %{"value" => total_visitors}} = Stats.aggregate(site, total_q, ["visitors"])
+ %{visitors: %{value: total_visitors}} = Stats.aggregate(site, total_q, [:visitors])
prop_names =
if query.filters["event:goal"] do
@@ -782,20 +812,20 @@ defmodule PlausibleWeb.Api.StatsController do
end
conversions =
- Stats.breakdown(site, query, "event:goal", ["visitors", "events"], {100, 1})
+ Stats.breakdown(site, query, "event:goal", [:visitors, :events], {100, 1})
|> transform_keys(%{
- "goal" => "name",
- "visitors" => "unique_conversions",
- "events" => "total_conversions"
+ goal: :name,
+ visitors: :unique_conversions,
+ events: :total_conversions
})
|> Enum.map(fn goal ->
goal
- |> Map.put(:prop_names, prop_names[goal["name"]])
- |> Map.put("conversion_rate", calculate_cr(total_visitors, goal["unique_conversions"]))
+ |> Map.put(:prop_names, prop_names[goal[:name]])
+ |> Map.put(:conversion_rate, calculate_cr(total_visitors, goal[:unique_conversions]))
end)
if params["csv"] do
- conversions |> to_csv(["name", "unique_conversions", "total_conversions"])
+ conversions |> to_csv([:name, :unique_conversions, :total_conversions])
else
json(conn, conversions)
end
@@ -803,27 +833,27 @@ defmodule PlausibleWeb.Api.StatsController do
def prop_breakdown(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
pagination = parse_pagination(params)
total_q = Query.remove_goal(query)
- %{"visitors" => %{"value" => unique_visitors}} = Stats.aggregate(site, total_q, ["visitors"])
+ %{:visitors => %{value: unique_visitors}} = Stats.aggregate(site, total_q, [:visitors])
prop_name = "event:props:" <> params["prop_name"]
props =
- Stats.breakdown(site, query, prop_name, ["visitors", "events"], pagination)
+ Stats.breakdown(site, query, prop_name, [:visitors, :events], pagination)
|> transform_keys(%{
- params["prop_name"] => "name",
- "events" => "total_conversions",
- "visitors" => "unique_conversions"
+ params["prop_name"] => :name,
+ :events => :total_conversions,
+ :visitors => :unique_conversions
})
|> Enum.map(fn prop ->
Map.put(
prop,
- "conversion_rate",
- calculate_cr(unique_visitors, prop["unique_conversions"])
+ :conversion_rate,
+ calculate_cr(unique_visitors, prop[:unique_conversions])
)
end)
@@ -836,9 +866,7 @@ defmodule PlausibleWeb.Api.StatsController do
def all_props_breakdown(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
-
- headers = ["prop", "name", "unique_conversions", "total_conversions"]
+ query = Query.from(site, params) |> Filters.add_prefix()
prop_names =
if query.filters["event:goal"] do
@@ -854,11 +882,11 @@ defmodule PlausibleWeb.Api.StatsController do
prop_names
|> Enum.map(fn prop ->
prop_breakdown(conn, Map.put(params, "prop_name", prop))
- |> Enum.map(&Map.put(&1, "prop", prop))
+ |> Enum.map(&Map.put(&1, :prop, prop))
end)
|> Enum.concat()
- to_csv(values, headers)
+ to_csv(values, [:prop, :name, :unique_conversions, :total_conversions])
end
def current_visitors(conn, _) do
@@ -874,7 +902,7 @@ defmodule PlausibleWeb.Api.StatsController do
def filter_suggestions(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
json(conn, Stats.filter_suggestions(site, query, params["filter_name"], params["q"]))
end
@@ -898,10 +926,10 @@ defmodule PlausibleWeb.Api.StatsController do
if Map.has_key?(query.filters, "event:goal") do
stat_list
else
- total = Enum.reduce(stat_list, 0, fn %{"visitors" => count}, total -> total + count end)
+ total = Enum.reduce(stat_list, 0, fn %{visitors: count}, total -> total + count end)
Enum.map(stat_list, fn stat ->
- Map.put(stat, "percentage", round(stat["visitors"] / total * 100))
+ Map.put(stat, :percentage, round(stat[:visitors] / total * 100))
end)
end
end
@@ -922,8 +950,8 @@ defmodule PlausibleWeb.Api.StatsController do
without_goal = Enum.find(list_without_goals, fn s -> s[key_name] === item[key_name] end)
item
- |> Map.put(:total_visitors, without_goal["visitors"])
- |> Map.put("conversion_rate", calculate_cr(without_goal["visitors"], item["visitors"]))
+ |> Map.put(:total_visitors, without_goal[:visitors])
+ |> Map.put(:conversion_rate, calculate_cr(without_goal[:visitors], item[:visitors]))
end)
end
@@ -939,7 +967,7 @@ defmodule PlausibleWeb.Api.StatsController do
|> Query.remove_goal()
res_without_goal =
- Stats.breakdown(site, query_without_goal, filter_name, ["visitors"], pagination)
+ Stats.breakdown(site, query_without_goal, filter_name, [:visitors], pagination)
list
|> add_cr(res_without_goal, key_name)
diff --git a/lib/plausible_web/controllers/auth_controller.ex b/lib/plausible_web/controllers/auth_controller.ex
index d60ca17dbbda..917e0201d40a 100644
--- a/lib/plausible_web/controllers/auth_controller.ex
+++ b/lib/plausible_web/controllers/auth_controller.ex
@@ -536,12 +536,14 @@ defmodule PlausibleWeb.AuthController do
|> redirect(to: redirect_to)
end
- def google_auth_callback(conn, %{"code" => code, "state" => site_id}) do
+ def google_auth_callback(conn, %{"code" => code, "state" => state}) do
res = Plausible.Google.Api.fetch_access_token(code)
id_token = res["id_token"]
[_, body, _] = String.split(id_token, ".")
id = body |> Base.decode64!(padding: false) |> Jason.decode!()
+ [site_id, redirect_to] = Jason.decode!(state)
+
Plausible.Site.GoogleAuth.changeset(%Plausible.Site.GoogleAuth{}, %{
email: id["email"],
refresh_token: res["refresh_token"],
@@ -554,6 +556,6 @@ defmodule PlausibleWeb.AuthController do
site = Repo.get(Plausible.Site, site_id)
- redirect(conn, to: "/#{URI.encode_www_form(site.domain)}/settings/search-console")
+ redirect(conn, to: "/#{URI.encode_www_form(site.domain)}/settings/#{redirect_to}")
end
end
diff --git a/lib/plausible_web/controllers/site_controller.ex b/lib/plausible_web/controllers/site_controller.ex
index 068cd3b040b9..7ec776fddd68 100644
--- a/lib/plausible_web/controllers/site_controller.ex
+++ b/lib/plausible_web/controllers/site_controller.ex
@@ -167,15 +167,28 @@ defmodule PlausibleWeb.SiteController do
redirect(conn, to: Routes.site_path(conn, :settings_general, website))
end
+ defp can_trigger_import(site) do
+ no_import = is_nil(site.imported_data) || site.imported_data.status == "error"
+
+ no_import && site.google_auth
+ end
+
def settings_general(conn, _params) do
site =
conn.assigns[:site]
- |> Repo.preload(:custom_domain)
+ |> Repo.preload([:custom_domain, :google_auth])
+
+ google_profiles =
+ if can_trigger_import(site) do
+ Plausible.Google.Api.get_analytics_view_ids(site)
+ end
conn
|> assign(:skip_plausible_tracking, true)
|> render("settings_general.html",
site: site,
+ google_profiles: google_profiles,
+ imported_data: site.imported_data,
changeset: Plausible.Site.changeset(site, %{}),
layout: {PlausibleWeb.LayoutView, "site_settings.html"}
)
@@ -295,9 +308,21 @@ defmodule PlausibleWeb.SiteController do
Repo.delete!(site.google_auth)
- conn
- |> put_flash(:success, "Google account unlinked from Plausible")
- |> redirect(to: Routes.site_path(conn, :settings_search_console, site.domain))
+ conn = put_flash(conn, :success, "Google account unlinked from Plausible")
+
+ panel =
+ conn.path_info
+ |> List.last()
+ |> String.split("-")
+ |> List.last()
+
+ case panel do
+ "search" ->
+ redirect(conn, to: Routes.site_path(conn, :settings_search_console, site.domain))
+
+ "import" ->
+ redirect(conn, to: Routes.site_path(conn, :settings_general, site.domain))
+ end
end
def update_settings(conn, %{"site" => site_params}) do
@@ -617,4 +642,60 @@ defmodule PlausibleWeb.SiteController do
|> put_flash(:success, "Custom domain deleted successfully")
|> redirect(to: "/#{URI.encode_www_form(site.domain)}/settings/general")
end
+
+ def import_from_google(conn, %{"profile" => profile}) do
+ site =
+ conn.assigns[:site]
+ |> Repo.preload(:google_auth)
+
+ cond do
+ site.imported_data ->
+ conn
+ |> put_flash(:error, "Data already imported from: #{site.imported_data.source}")
+ |> redirect(to: Routes.site_path(conn, :settings_general, site.domain))
+
+ profile == "" ->
+ conn
+ |> put_flash(:error, "A Google Analytics profile must be selected")
+ |> redirect(to: Routes.site_path(conn, :settings_general, site.domain))
+
+ true ->
+ job =
+ Plausible.Workers.ImportGoogleAnalytics.new(%{
+ "site_id" => site.id,
+ "profile" => profile
+ })
+
+ Ecto.Multi.new()
+ |> Ecto.Multi.update(:update_site, Plausible.Site.start_import(site, "Google Analytics"))
+ |> Oban.insert(:oban_job, job)
+ |> Repo.transaction()
+
+ conn
+ |> put_flash(:success, "Import scheduled. An email will be sent when it completes.")
+ |> redirect(to: Routes.site_path(conn, :settings_general, site.domain))
+ end
+ end
+
+ def forget_imported(conn, _params) do
+ site = conn.assigns[:site]
+
+ cond do
+ site.imported_data ->
+ Plausible.Imported.forget(site)
+
+ site
+ |> Plausible.Site.remove_imported_data()
+ |> Repo.update!()
+
+ conn
+ |> put_flash(:success, "Imported data has been forgotten")
+ |> redirect(to: Routes.site_path(conn, :settings_general, site.domain))
+
+ true ->
+ conn
+ |> put_flash(:error, "No data has been imported")
+ |> redirect(to: Routes.site_path(conn, :settings_general, site.domain))
+ end
+ end
end
diff --git a/lib/plausible_web/controllers/stats_controller.ex b/lib/plausible_web/controllers/stats_controller.ex
index 9352a9e1b6a1..e925904e6a04 100644
--- a/lib/plausible_web/controllers/stats_controller.ex
+++ b/lib/plausible_web/controllers/stats_controller.ex
@@ -48,11 +48,11 @@ defmodule PlausibleWeb.StatsController do
"""
def csv_export(conn, params) do
site = conn.assigns[:site]
- query = Query.from(site.timezone, params) |> Filters.add_prefix()
+ query = Query.from(site, params) |> Filters.add_prefix()
- metrics = ["visitors", "pageviews", "bounce_rate", "visit_duration"]
+ metrics = [:visitors, :pageviews, :bounce_rate, :visit_duration]
graph = Plausible.Stats.timeseries(site, query, metrics)
- headers = ["date" | metrics]
+ headers = [:date | metrics]
visitors =
Enum.map(graph, fn row -> Enum.map(headers, &row[&1]) end)
diff --git a/lib/plausible_web/email.ex b/lib/plausible_web/email.ex
index ab1c5870ad8b..2508babdbbf4 100644
--- a/lib/plausible_web/email.ex
+++ b/lib/plausible_web/email.ex
@@ -280,6 +280,31 @@ defmodule PlausibleWeb.Email do
)
end
+ def import_success(user, site) do
+ base_email()
+ |> to(user)
+ |> tag("import-success-email")
+ |> subject("Google Analytics data imported for #{site.domain}")
+ |> render("google_analytics_import.html", %{
+ site: site,
+ link: PlausibleWeb.Endpoint.url() <> "/" <> URI.encode_www_form(site.domain),
+ user: user,
+ success: true
+ })
+ end
+
+ def import_failure(user, site) do
+ base_email()
+ |> to(user)
+ |> tag("import-failure-email")
+ |> subject("Google Analytics import failed for #{site.domain}")
+ |> render("google_analytics_import.html", %{
+ user: user,
+ site: site,
+ success: false
+ })
+ end
+
defp base_email() do
mailer_from = Application.get_env(:plausible, :mailer_email)
diff --git a/lib/plausible_web/refinspector.ex b/lib/plausible_web/refinspector.ex
new file mode 100644
index 000000000000..154ab9476b22
--- /dev/null
+++ b/lib/plausible_web/refinspector.ex
@@ -0,0 +1,25 @@
+defmodule PlausibleWeb.RefInspector do
+ def parse(nil), do: nil
+
+ def parse(ref) do
+ case ref.source do
+ :unknown ->
+ uri = URI.parse(String.trim(ref.referer))
+
+ if right_uri?(uri) do
+ String.replace_leading(uri.host, "www.", "")
+ end
+
+ source ->
+ source
+ end
+ end
+
+ def right_uri?(%URI{host: nil}), do: false
+
+ def right_uri?(%URI{host: host, scheme: scheme})
+ when scheme in ["http", "https"] and byte_size(host) > 0,
+ do: true
+
+ def right_uri?(_), do: false
+end
diff --git a/lib/plausible_web/router.ex b/lib/plausible_web/router.ex
index 64080a668883..c118d053daa2 100644
--- a/lib/plausible_web/router.ex
+++ b/lib/plausible_web/router.ex
@@ -233,11 +233,15 @@ defmodule PlausibleWeb.Router do
delete "/:website/goals/:id", SiteController, :delete_goal
put "/:website/settings", SiteController, :update_settings
put "/:website/settings/google", SiteController, :update_google_auth
- delete "/:website/settings/google", SiteController, :delete_google_auth
+ delete "/:website/settings/google-search", SiteController, :delete_google_auth
+ delete "/:website/settings/google-import", SiteController, :delete_google_auth
delete "/:website", SiteController, :delete_site
delete "/:website/stats", SiteController, :reset_stats
get "/:domain/export", StatsController, :csv_export
get "/:domain/*path", StatsController, :stats
+
+ post "/:website/settings/google-import", SiteController, :import_from_google
+ delete "/:website/settings/forget-imported", SiteController, :forget_imported
end
end
diff --git a/lib/plausible_web/templates/email/google_analytics_import.html.eex b/lib/plausible_web/templates/email/google_analytics_import.html.eex
new file mode 100644
index 000000000000..d56f3f0cf407
--- /dev/null
+++ b/lib/plausible_web/templates/email/google_analytics_import.html.eex
@@ -0,0 +1,13 @@
+Hey <%= user_salutation(@user) %>,
+
+<%= if @success do %>
+ Your Google Analytics import has completed.
+
+ View dashboard: @link
+<% else %>
+ Unfortunately, your Google Analytics import failed.
+<% end %>
+
+--
+
+<%= plausible_url() %>
diff --git a/lib/plausible_web/templates/email/weekly_report.html.eex b/lib/plausible_web/templates/email/weekly_report.html.eex
index 6756d75669c2..3e61ec0a781a 100644
--- a/lib/plausible_web/templates/email/weekly_report.html.eex
+++ b/lib/plausible_web/templates/email/weekly_report.html.eex
@@ -434,7 +434,7 @@ body {
-
<%= source["source"] %>
+
<%= source[:source] %>
@@ -453,7 +453,7 @@ body {
-
<%= PlausibleWeb.StatsView.large_number_format(source["visitors"]) %>
+
<%= PlausibleWeb.StatsView.large_number_format(source[:visitors]) %>
@@ -563,7 +563,7 @@ body {
-
<%= page["page"] %>
+
<%= page[:page] %>
@@ -582,7 +582,7 @@ body {
-
<%= PlausibleWeb.StatsView.large_number_format(page["visitors"]) %>
+
<%= PlausibleWeb.StatsView.large_number_format(page[:visitors]) %>
diff --git a/lib/plausible_web/templates/site/settings_general.html.eex b/lib/plausible_web/templates/site/settings_general.html.eex
index fd5dc21a1e01..689f4698878d 100644
--- a/lib/plausible_web/templates/site/settings_general.html.eex
+++ b/lib/plausible_web/templates/site/settings_general.html.eex
@@ -46,3 +46,73 @@
<% end %>
+
+
+
+
+ <%= if Keyword.get(Application.get_env(:plausible, :google), :client_id) do %>
+ <%= cond do %>
+ <% @imported_data && @imported_data.status == "importing" -> %>
+
+
We are importing data from <%= @imported_data.source %> in the background... You will receive an email when it's completed
+
+ <% @imported_data && @imported_data.status == "ok" -> %>
+
+
+
+ Forget Imported Data
+
+
+ Removes all data imported from <%= @imported_data.source %>
+
+
+ <%= link("Forget imported stats", to: "/#{URI.encode_www_form(@site.domain)}/settings/forget-imported", method: :delete, class: "inline-block mt-4 px-4 py-2 border border-gray-300 dark:border-gray-500 text-sm leading-5 font-medium rounded-md text-red-700 bg-white dark:bg-gray-800 hover:text-red-500 dark:hover:text-red-400 focus:outline-none focus:border-blue-300 focus:ring active:text-red-800 active:bg-gray-50 transition ease-in-out duration-150") %>
+
+
+ <%= if @site.google_auth do %>
+ <%= link("Unlink Google account", to: "/#{URI.encode_www_form(@site.domain)}/settings/google-import", class: "inline-block mt-4 px-4 py-2 border border-gray-300 dark:border-gray-500 text-sm leading-5 font-medium rounded-md text-red-700 bg-white dark:bg-gray-800 hover:text-red-500 dark:hover:text-red-400 focus:outline-none focus:border-blue-300 focus:ring active:text-red-800 active:bg-gray-50 transition ease-in-out duration-150", method: "delete") %>
+ <% end %>
+
+ <% @site.google_auth -> %>
+
+
Linked Google account: <%= @site.google_auth.email %>
+
+ <%= case @google_profiles do %>
+ <% {:ok, profiles} -> %>
+
+ Select the Google Analytics profile you would like to import data from.
+
+
+ <%= form_for @conn, "/#{URI.encode_www_form(@site.domain)}/settings/google-import", [class: "max-w-xs"], fn f -> %>
+
+
+ <%= select f, :profile, profiles, prompt: "(Choose profile)", class: "dark:bg-gray-800 mt-1 block w-full pl-3 pr-10 py-2 text-base border-gray-300 dark:border-gray-500 outline-none focus:outline-none focus:ring-indigo-500 focus:border-indigo-500 sm:text-sm rounded-md dark:text-gray-100" %>
+
+
+ <%= submit "Import", class: "button" %>
+ <% end %>
+
+ <% {:error, error} -> %>
+
The following error occurred when fetching your Google Analytics profiles.
+
<%= error %>
+ <% end %>
+
+ <%= link("Unlink Google account", to: "/#{URI.encode_www_form(@site.domain)}/settings/google-import", class: "inline-block mt-4 px-4 py-2 border border-gray-300 dark:border-gray-500 text-sm leading-5 font-medium rounded-md text-red-700 bg-white dark:bg-gray-800 hover:text-red-500 dark:hover:text-red-400 focus:outline-none focus:border-blue-300 focus:ring active:text-red-800 active:bg-gray-50 transition ease-in-out duration-150", method: "delete") %>
+
+ <% true -> %>
+ <%= button("Continue with Google", to: Plausible.Google.Api.authorize_url(@site.id, "general"), class: "button mt-8") %>
+ <% end %>
+ <% else %>
+
+
+
An extra step is needed to set up your Plausible Analytics Self Hosted for the Google Search Console integration.
+ Find instructions <%= link("here", to: "https://plausible.io/docs/self-hosting-configuration#google-search-integration", class: "text-indigo-500") %>
+
+ <% end %>
+
diff --git a/lib/plausible_web/templates/site/settings_search_console.html.eex b/lib/plausible_web/templates/site/settings_search_console.html.eex
index 0177ec2894a8..5181e906e01f 100644
--- a/lib/plausible_web/templates/site/settings_search_console.html.eex
+++ b/lib/plausible_web/templates/site/settings_search_console.html.eex
@@ -12,7 +12,7 @@
Linked Google account: <%= @site.google_auth.email %>
- <%= link("Unlink Google account", to: "/#{URI.encode_www_form(@site.domain)}/settings/google", class: "inline-block mt-4 px-4 py-2 border border-gray-300 dark:border-gray-500 text-sm leading-5 font-medium rounded-md text-red-700 bg-white dark:bg-gray-800 hover:text-red-500 dark:hover:text-red-400 focus:outline-none focus:border-blue-300 focus:ring active:text-red-800 active:bg-gray-50 transition ease-in-out duration-150", method: "delete") %>
+ <%= link("Unlink Google account", to: "/#{URI.encode_www_form(@site.domain)}/settings/google-search", class: "inline-block mt-4 px-4 py-2 border border-gray-300 dark:border-gray-500 text-sm leading-5 font-medium rounded-md text-red-700 bg-white dark:bg-gray-800 hover:text-red-500 dark:hover:text-red-400 focus:outline-none focus:border-blue-300 focus:ring active:text-red-800 active:bg-gray-50 transition ease-in-out duration-150", method: "delete") %>
<%= case @search_console_domains do %>
<% {:ok, domains} -> %>
@@ -40,7 +40,7 @@
<%= error %>
<% end %>
<% else %>
- <%= button("Continue with Google", to: Plausible.Google.Api.authorize_url(@site.id), class: "button mt-8") %>
+ <%= button("Continue with Google", to: Plausible.Google.Api.authorize_url(@site.id, "search-console"), class: "button mt-8") %>
NB: You also need to set up your site on <%= link("Google Search Console", to: "https://search.google.com/search-console/about") %> for the integration to work. <%= link("Read the docs", to: "https://plausible.io/docs/google-search-console-integration", class: "text-indigo-500", rel: "noreferrer") %>
diff --git a/lib/workers/import_google_analytics.ex b/lib/workers/import_google_analytics.ex
new file mode 100644
index 000000000000..0e32b8c109a4
--- /dev/null
+++ b/lib/workers/import_google_analytics.ex
@@ -0,0 +1,46 @@
+defmodule Plausible.Workers.ImportGoogleAnalytics do
+ use Plausible.Repo
+
+ use Oban.Worker,
+ queue: :google_analytics_imports,
+ max_attempts: 1,
+ unique: [fields: [:args], period: 60]
+
+ @impl Oban.Worker
+ def perform(
+ %Oban.Job{args: %{"site_id" => site_id, "profile" => profile}},
+ google_api \\ Plausible.Google.Api
+ ) do
+ site =
+ Repo.get(Plausible.Site, site_id)
+ |> Repo.preload([:google_auth, [memberships: :user]])
+
+ case google_api.import_analytics(site, profile) do
+ {:ok, _} ->
+ Plausible.Site.import_success(site)
+ |> Repo.update!()
+
+ Enum.each(site.memberships, fn membership ->
+ if membership.role in [:owner, :admin] do
+ PlausibleWeb.Email.import_success(membership.user, site)
+ |> Plausible.Mailer.send_email_safe()
+ end
+ end)
+
+ :ok
+
+ {:error, error} ->
+ Plausible.Site.import_failure(site)
+ |> Repo.update!()
+
+ Enum.each(site.memberships, fn membership ->
+ if membership.role in [:owner, :admin] do
+ PlausibleWeb.Email.import_failure(membership.user, site)
+ |> Plausible.Mailer.send_email_safe()
+ end
+ end)
+
+ {:error, error}
+ end
+ end
+end
diff --git a/lib/workers/send_email_report.ex b/lib/workers/send_email_report.ex
index 5438a10ae5f4..5a7eb477b9bc 100644
--- a/lib/workers/send_email_report.ex
+++ b/lib/workers/send_email_report.ex
@@ -9,7 +9,7 @@ defmodule Plausible.Workers.SendEmailReport do
site = Repo.get(Plausible.Site, site_id) |> Repo.preload(:weekly_report)
today = Timex.now(site.timezone) |> DateTime.to_date()
date = Timex.shift(today, weeks: -1) |> Timex.end_of_week() |> Date.to_iso8601()
- query = Query.from(site.timezone, %{"period" => "7d", "date" => date})
+ query = Query.from(site, %{"period" => "7d", "date" => date})
for email <- site.weekly_report.recipients do
unsubscribe_link =
@@ -32,7 +32,7 @@ defmodule Plausible.Workers.SendEmailReport do
|> Timex.beginning_of_month()
query =
- Query.from(site.timezone, %{
+ Query.from(site, %{
"period" => "month",
"date" => Timex.format!(last_month, "{ISOdate}")
})
@@ -50,26 +50,26 @@ defmodule Plausible.Workers.SendEmailReport do
defp send_report(email, site, name, unsubscribe_link, query) do
prev_query = Query.shift_back(query, site)
- curr_period = Stats.aggregate(site, query, ["pageviews", "visitors", "bounce_rate"])
- prev_period = Stats.aggregate(site, prev_query, ["pageviews", "visitors", "bounce_rate"])
+ curr_period = Stats.aggregate(site, query, [:pageviews, :visitors, :bounce_rate])
+ prev_period = Stats.aggregate(site, prev_query, [:pageviews, :visitors, :bounce_rate])
- change_pageviews = Stats.Compare.calculate_change("pageviews", prev_period, curr_period)
- change_visitors = Stats.Compare.calculate_change("visitors", prev_period, curr_period)
- change_bounce_rate = Stats.Compare.calculate_change("bounce_rate", prev_period, curr_period)
+ change_pageviews = Stats.Compare.calculate_change(:pageviews, prev_period, curr_period)
+ change_visitors = Stats.Compare.calculate_change(:visitors, prev_period, curr_period)
+ change_bounce_rate = Stats.Compare.calculate_change(:bounce_rate, prev_period, curr_period)
source_query = Query.put_filter(query, "visit:source", {:is_not, "Direct / None"})
- sources = Stats.breakdown(site, source_query, "visit:source", ["visitors"], {5, 1})
- pages = Stats.breakdown(site, query, "event:page", ["visitors"], {5, 1})
+ sources = Stats.breakdown(site, source_query, "visit:source", [:visitors], {5, 1})
+ pages = Stats.breakdown(site, query, "event:page", [:visitors], {5, 1})
user = Plausible.Auth.find_user_by(email: email)
login_link = user && Plausible.Sites.is_member?(user.id, site)
template =
PlausibleWeb.Email.weekly_report(email, site,
- unique_visitors: curr_period["visitors"]["value"],
+ unique_visitors: curr_period[:visitors][:value],
change_visitors: change_visitors,
- pageviews: curr_period["pageviews"]["value"],
+ pageviews: curr_period[:pageviews][:value],
change_pageviews: change_pageviews,
- bounce_rate: curr_period["bounce_rate"]["value"],
+ bounce_rate: curr_period[:bounce_rate][:value],
change_bounce_rate: change_bounce_rate,
sources: sources,
unsubscribe_link: unsubscribe_link,
diff --git a/lib/workers/spike_notifier.ex b/lib/workers/spike_notifier.ex
index a336b34e5d47..52cde6cf1a7f 100644
--- a/lib/workers/spike_notifier.ex
+++ b/lib/workers/spike_notifier.ex
@@ -19,7 +19,7 @@ defmodule Plausible.Workers.SpikeNotifier do
)
for notification <- notifications do
- query = Query.from(notification.site.timezone, %{"period" => "realtime"})
+ query = Query.from(notification.site, %{"period" => "realtime"})
current_visitors = clickhouse.current_visitors(notification.site, query)
if current_visitors >= notification.threshold do
diff --git a/mix.lock b/mix.lock
index 8d04a0ea8cd0..360aee29f41e 100644
--- a/mix.lock
+++ b/mix.lock
@@ -9,7 +9,7 @@
"cachex": {:hex, :cachex, "3.4.0", "868b2959ea4aeb328c6b60ff66c8d5123c083466ad3c33d3d8b5f142e13101fb", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "370123b1ab4fba4d2965fb18f87fd758325709787c8c5fce35b3fe80645ccbe5"},
"certifi": {:hex, :certifi, "2.8.0", "d4fb0a6bb20b7c9c3643e22507e42f356ac090a1dcea9ab99e27e0376d695eba", [:rebar3], [], "hexpm", "6ac7efc1c6f8600b08d625292d4bbf584e14847ce1b6b5c44d983d273e1097ea"},
"chatterbox": {:hex, :ts_chatterbox, "0.11.0", "b8f372c706023eb0de5bf2976764edb27c70fe67052c88c1f6a66b3a5626847f", [:rebar3], [{:hpack, "~>0.2.3", [hex: :hpack_erl, repo: "hexpm", optional: false]}], "hexpm", "722fe2bad52913ab7e87d849fc6370375f0c961ffb2f0b5e6d647c9170c382a6"},
- "clickhouse_ecto": {:git, "https://github.com/plausible/clickhouse_ecto.git", "93d86c48230f85797555c348dbe9e8738d3b8cc2", []},
+ "clickhouse_ecto": {:git, "https://github.com/plausible/clickhouse_ecto.git", "7bc94cce111d3e9dbd8534fe96bd5195181826a2", []},
"clickhousex": {:git, "https://github.com/plausible/clickhousex", "6405ac09b4fa103644bb4fe7fc0509fb48497927", []},
"combination": {:hex, :combination, "0.0.3", "746aedca63d833293ec6e835aa1f34974868829b1486b1e1cb0685f0b2ae1f41", [:mix], [], "hexpm", "72b099f463df42ef7dc6371d250c7070b57b6c5902853f69deb894f79eda18ca"},
"combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
diff --git a/priv/clickhouse_repo/migrations/20211112130238_create_imported_tables.exs b/priv/clickhouse_repo/migrations/20211112130238_create_imported_tables.exs
new file mode 100644
index 000000000000..33bc8b23c4d4
--- /dev/null
+++ b/priv/clickhouse_repo/migrations/20211112130238_create_imported_tables.exs
@@ -0,0 +1,118 @@
+defmodule Plausible.ClickhouseRepo.Migrations.CreateImportedVisitors do
+ use Ecto.Migration
+
+ def change do
+ create_if_not_exists table(:imported_visitors,
+ engine: "MergeTree() ORDER BY (site_id, date)"
+ ) do
+ add(:site_id, :UInt64)
+ add(:date, :date)
+ add(:visitors, :UInt64)
+ add(:pageviews, :UInt64)
+ add(:bounces, :UInt64)
+ add(:visits, :UInt64)
+ add(:visit_duration, :UInt64)
+ end
+
+ create_if_not_exists table(:imported_sources,
+ engine: "MergeTree() ORDER BY (site_id, date, source)"
+ ) do
+ add(:site_id, :UInt64)
+ add(:date, :date)
+ add(:source, :string)
+ add(:utm_medium, :string)
+ add(:utm_campaign, :string)
+ add(:utm_content, :string)
+ add(:utm_term, :string)
+ add(:visitors, :UInt64)
+ add(:visits, :UInt64)
+ add(:visit_duration, :UInt64)
+ add(:bounces, :UInt32)
+ end
+
+ create_if_not_exists table(:imported_pages,
+ engine: "MergeTree() ORDER BY (site_id, date, hostname, page)"
+ ) do
+ add(:site_id, :UInt64)
+ add(:date, :date)
+ add(:hostname, :string)
+ add(:page, :string)
+ add(:visitors, :UInt64)
+ add(:pageviews, :UInt64)
+ add(:exits, :UInt64)
+ add(:time_on_page, :UInt64)
+ end
+
+ create_if_not_exists table(:imported_entry_pages,
+ engine: "MergeTree() ORDER BY (site_id, date, entry_page)"
+ ) do
+ add(:site_id, :UInt64)
+ add(:date, :date)
+ add(:entry_page, :string)
+ add(:visitors, :UInt64)
+ add(:entrances, :UInt64)
+ add(:visit_duration, :UInt64)
+ add(:bounces, :UInt32)
+ end
+
+ create_if_not_exists table(:imported_exit_pages,
+ engine: "MergeTree() ORDER BY (site_id, date, exit_page)"
+ ) do
+ add(:site_id, :UInt64)
+ add(:date, :date)
+ add(:exit_page, :string)
+ add(:visitors, :UInt64)
+ add(:exits, :UInt64)
+ end
+
+ create_if_not_exists table(:imported_locations,
+ engine: "MergeTree() ORDER BY (site_id, date, country, region, city)"
+ ) do
+ add(:site_id, :UInt64)
+ add(:date, :date)
+ add(:country, :string)
+ add(:region, :string)
+ add(:city, :UInt64)
+ add(:visitors, :UInt64)
+ add(:visits, :UInt64)
+ add(:visit_duration, :UInt64)
+ add(:bounces, :UInt32)
+ end
+
+ create_if_not_exists table(:imported_devices,
+ engine: "MergeTree() ORDER BY (site_id, date, device)"
+ ) do
+ add(:site_id, :UInt64)
+ add(:date, :date)
+ add(:device, :string)
+ add(:visitors, :UInt64)
+ add(:visits, :UInt64)
+ add(:visit_duration, :UInt64)
+ add(:bounces, :UInt32)
+ end
+
+ create_if_not_exists table(:imported_browsers,
+ engine: "MergeTree() ORDER BY (site_id, date, browser)"
+ ) do
+ add(:site_id, :UInt64)
+ add(:date, :date)
+ add(:browser, :string)
+ add(:visitors, :UInt64)
+ add(:visits, :UInt64)
+ add(:visit_duration, :UInt64)
+ add(:bounces, :UInt32)
+ end
+
+ create_if_not_exists table(:imported_operating_systems,
+ engine: "MergeTree() ORDER BY (site_id, date, operating_system)"
+ ) do
+ add(:site_id, :UInt64)
+ add(:date, :date)
+ add(:operating_system, :string)
+ add(:visitors, :UInt64)
+ add(:visits, :UInt64)
+ add(:visit_duration, :UInt64)
+ add(:bounces, :UInt32)
+ end
+ end
+end
diff --git a/priv/ref_inspector/referers.yml b/priv/ref_inspector/referers.yml
index a6f88f3c7085..7b9cc1b60096 100644
--- a/priv/ref_inspector/referers.yml
+++ b/priv/ref_inspector/referers.yml
@@ -184,7 +184,7 @@ email:
Rambler:
domains:
- - mail.rambler.ru
+ - mail.rambler.ru
Seznam Mail:
domains:
diff --git a/priv/repo/migrations/20191010031425_add_property_to_google_auth.exs b/priv/repo/migrations/20191010031425_add_property_to_google_auth.exs
index 9c16c8bd2028..c9e3ab112676 100644
--- a/priv/repo/migrations/20191010031425_add_property_to_google_auth.exs
+++ b/priv/repo/migrations/20191010031425_add_property_to_google_auth.exs
@@ -6,15 +6,5 @@ defmodule Plausible.Repo.Migrations.AddPropertyToGoogleAuth do
alter table(:google_auth) do
add :property, :text
end
-
- flush()
-
- for auth <- Repo.all(Plausible.Site.GoogleAuth) do
- auth = Repo.preload(auth, :site)
- property = "https://#{auth.site.domain}"
-
- Plausible.Site.GoogleAuth.set_property(auth, %{property: property})
- |> Repo.update!()
- end
end
end
diff --git a/priv/repo/migrations/20211110174617_add_site_imported_source.exs b/priv/repo/migrations/20211110174617_add_site_imported_source.exs
new file mode 100644
index 000000000000..0b30f982dc60
--- /dev/null
+++ b/priv/repo/migrations/20211110174617_add_site_imported_source.exs
@@ -0,0 +1,9 @@
+defmodule Plausible.Repo.Migrations.AddSiteImportedSource do
+ use Ecto.Migration
+
+ def change do
+ alter table(:sites) do
+ add :imported_data, :map
+ end
+ end
+end
diff --git a/test/plausible/imported/imported_test.exs b/test/plausible/imported/imported_test.exs
new file mode 100644
index 000000000000..d211d2215e0e
--- /dev/null
+++ b/test/plausible/imported/imported_test.exs
@@ -0,0 +1,606 @@
+defmodule Plausible.ImportedTest do
+ use PlausibleWeb.ConnCase
+ use Timex
+ import Plausible.TestUtils
+
+ @user_id 123
+
+ describe "Parse and import third party data fetched from Google Analytics" do
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
+
+ test "Visitors data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, timestamp: ~N[2021-01-01 00:00:00]),
+ build(:pageview, timestamp: ~N[2021-01-31 00:00:00])
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101"],
+ "metrics" => [%{"values" => ["1", "1", "0", "1", "60"]}]
+ },
+ %{
+ "dimensions" => ["20210131"],
+ "metrics" => [%{"values" => ["1", "1", "1", "1", "60"]}]
+ }
+ ],
+ site.id,
+ "imported_visitors"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/main-graph?period=month&date=2021-01-01&with_imported=true"
+ )
+
+ assert %{"plot" => plot, "imported_source" => "Google Analytics"} = json_response(conn, 200)
+
+ assert Enum.count(plot) == 31
+ assert List.first(plot) == 2
+ assert List.last(plot) == 2
+ assert Enum.sum(plot) == 4
+ end
+
+ test "Sources are imported", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview,
+ referrer_source: "Google",
+ referrer: "google.com",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ referrer_source: "Google",
+ referrer: "google.com",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ referrer_source: "DuckDuckGo",
+ referrer: "duckduckgo.com",
+ timestamp: ~N[2021-01-01 00:00:00]
+ )
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "duckduckgo.com", "organic", "", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "0", "60"]}]
+ },
+ %{
+ "dimensions" => ["20210131", "google.com", "organic", "", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "1", "60"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "google.com", "paid", "", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "1", "60"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Twitter", "social", "", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "1", "60"]}]
+ },
+ %{
+ "dimensions" => [
+ "20210131",
+ "A Nice Newsletter",
+ "email",
+ "newsletter",
+ "",
+ ""
+ ],
+ "metrics" => [%{"values" => ["1", "1", "1", "60"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "(direct)", "(none)", "", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "1", "60"]}]
+ }
+ ],
+ site.id,
+ "imported_sources"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/sources?period=month&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Google", "visitors" => 4},
+ %{"name" => "DuckDuckGo", "visitors" => 2},
+ %{"name" => "A Nice Newsletter", "visitors" => 1},
+ %{"name" => "Twitter", "visitors" => 1}
+ ]
+ end
+
+ test "UTM mediums data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview,
+ utm_medium: "social",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ utm_medium: "social",
+ timestamp: ~N[2021-01-01 12:00:00]
+ )
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "Twitter", "social", "", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "1", "60"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "(direct)", "(none)", "", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "1", "60"]}]
+ }
+ ],
+ site.id,
+ "imported_sources"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_mediums?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "bounce_rate" => 100.0,
+ "name" => "social",
+ "visit_duration" => 20,
+ "visitors" => 3
+ }
+ ]
+ end
+
+ test "UTM campaigns data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, utm_campaign: "profile", timestamp: ~N[2021-01-01 00:00:00]),
+ build(:pageview, utm_campaign: "august", timestamp: ~N[2021-01-01 00:00:00])
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "Twitter", "social", "profile", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "1", "100"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Gmail", "email", "august", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "0", "100"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Gmail", "email", "(not set)", "", ""],
+ "metrics" => [%{"values" => ["1", "1", "0", "100"]}]
+ }
+ ],
+ site.id,
+ "imported_sources"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_campaigns?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "august",
+ "visitors" => 2,
+ "bounce_rate" => 50.0,
+ "visit_duration" => 50.0
+ },
+ %{
+ "name" => "profile",
+ "visitors" => 2,
+ "bounce_rate" => 100.0,
+ "visit_duration" => 50.0
+ }
+ ]
+ end
+
+ test "UTM terms data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, utm_term: "oat milk", timestamp: ~N[2021-01-01 00:00:00]),
+ build(:pageview, utm_term: "Sweden", timestamp: ~N[2021-01-01 00:00:00])
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "Google", "paid", "", "", "oat milk"],
+ "metrics" => [%{"values" => ["1", "1", "1", "100"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Google", "paid", "", "", "Sweden"],
+ "metrics" => [%{"values" => ["1", "1", "0", "100"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Google", "paid", "", "", "(not set)"],
+ "metrics" => [%{"values" => ["1", "1", "0", "100"]}]
+ }
+ ],
+ site.id,
+ "imported_sources"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_terms?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "Sweden",
+ "visitors" => 2,
+ "bounce_rate" => 50.0,
+ "visit_duration" => 50.0
+ },
+ %{
+ "name" => "oat milk",
+ "visitors" => 2,
+ "bounce_rate" => 100.0,
+ "visit_duration" => 50.0
+ }
+ ]
+ end
+
+ test "UTM contents data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, utm_content: "ad", timestamp: ~N[2021-01-01 00:00:00]),
+ build(:pageview, utm_content: "blog", timestamp: ~N[2021-01-01 00:00:00])
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "Google", "paid", "", "ad", ""],
+ "metrics" => [%{"values" => ["1", "1", "1", "100"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Google", "paid", "", "blog", ""],
+ "metrics" => [%{"values" => ["1", "1", "0", "100"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Google", "paid", "", "(not set)", ""],
+ "metrics" => [%{"values" => ["1", "1", "0", "100"]}]
+ }
+ ],
+ site.id,
+ "imported_sources"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_contents?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "blog",
+ "visitors" => 2,
+ "bounce_rate" => 50.0,
+ "visit_duration" => 50.0
+ },
+ %{
+ "name" => "ad",
+ "visitors" => 2,
+ "bounce_rate" => 100.0,
+ "visit_duration" => 50.0
+ }
+ ]
+ end
+
+ test "Page event data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview,
+ pathname: "/",
+ hostname: "host-a.com",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/some-other-page",
+ hostname: "host-a.com",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:15:00]
+ )
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "host-a.com", "/"],
+ "metrics" => [%{"values" => ["1", "1", "0", "700"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "host-b.com", "/some-other-page"],
+ "metrics" => [%{"values" => ["1", "2", "1", "60"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "host-b.com", "/some-other-page?wat=wot"],
+ "metrics" => [%{"values" => ["1", "1", "0", "60"]}]
+ }
+ ],
+ site.id,
+ "imported_pages"
+ )
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "/"],
+ "metrics" => [%{"values" => ["1", "3", "10", "1"]}]
+ }
+ ],
+ site.id,
+ "imported_entry_pages"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/pages?period=day&date=2021-01-01&detailed=true&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "bounce_rate" => nil,
+ "time_on_page" => 60,
+ "visitors" => 3,
+ "pageviews" => 4,
+ "name" => "/some-other-page"
+ },
+ %{
+ "bounce_rate" => 25.0,
+ "time_on_page" => 800.0,
+ "visitors" => 2,
+ "pageviews" => 2,
+ "name" => "/"
+ }
+ ]
+ end
+
+ test "Exit page event data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview,
+ pathname: "/page1",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/page1",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/page1",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/page2",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:15:00]
+ )
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "host-a.com", "/page2"],
+ "metrics" => [%{"values" => ["2", "4", "0", "10"]}]
+ }
+ ],
+ site.id,
+ "imported_pages"
+ )
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "/page2"],
+ "metrics" => [%{"values" => ["2", "3"]}]
+ }
+ ],
+ site.id,
+ "imported_exit_pages"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/exit-pages?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "/page2",
+ "unique_exits" => 3,
+ "total_exits" => 4,
+ "exit_rate" => 80.0
+ },
+ %{"name" => "/page1", "unique_exits" => 2, "total_exits" => 2, "exit_rate" => 66}
+ ]
+ end
+
+ test "Location data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview,
+ country_code: "EE",
+ timestamp: ~N[2021-01-01 00:15:00]
+ ),
+ build(:pageview,
+ country_code: "EE",
+ timestamp: ~N[2021-01-01 00:15:00]
+ ),
+ build(:pageview,
+ country_code: "GB",
+ timestamp: ~N[2021-01-01 00:15:00]
+ )
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "EE", "Tartumaa"],
+ "metrics" => [%{"values" => ["1", "1", "0", "10"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "GB", "Midlothian"],
+ "metrics" => [%{"values" => ["1", "1", "0", "10"]}]
+ }
+ ],
+ site.id,
+ "imported_locations"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/countries?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "code" => "EE",
+ "alpha_3" => "EST",
+ "name" => "Estonia",
+ "flag" => "🇪🇪",
+ "visitors" => 3,
+ "percentage" => 60
+ },
+ %{
+ "code" => "GB",
+ "alpha_3" => "GBR",
+ "name" => "United Kingdom",
+ "flag" => "🇬🇧",
+ "visitors" => 2,
+ "percentage" => 40
+ }
+ ]
+ end
+
+ test "Devices data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, screen_size: "Desktop", timestamp: ~N[2021-01-01 00:15:00]),
+ build(:pageview, screen_size: "Desktop", timestamp: ~N[2021-01-01 00:15:00]),
+ build(:pageview, screen_size: "Laptop", timestamp: ~N[2021-01-01 00:15:00])
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "mobile"],
+ "metrics" => [%{"values" => ["1", "1", "0", "10"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Laptop"],
+ "metrics" => [%{"values" => ["1", "1", "0", "10"]}]
+ }
+ ],
+ site.id,
+ "imported_devices"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/screen-sizes?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Desktop", "visitors" => 2, "percentage" => 40},
+ %{"name" => "Laptop", "visitors" => 2, "percentage" => 40},
+ %{"name" => "Mobile", "visitors" => 1, "percentage" => 20}
+ ]
+ end
+
+ test "Browsers data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, browser: "Chrome", timestamp: ~N[2021-01-01 00:15:00]),
+ build(:pageview, browser: "Firefox", timestamp: ~N[2021-01-01 00:15:00])
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "User-Agent: Mozilla"],
+ "metrics" => [%{"values" => ["1", "1", "0", "10"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Android Browser"],
+ "metrics" => [%{"values" => ["1", "1", "0", "10"]}]
+ }
+ ],
+ site.id,
+ "imported_browsers"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/browsers?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Firefox", "visitors" => 2, "percentage" => 50},
+ %{"name" => "Mobile App", "visitors" => 1, "percentage" => 25},
+ %{"name" => "Chrome", "visitors" => 1, "percentage" => 25}
+ ]
+ end
+
+ test "OS data imported from Google Analytics", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, operating_system: "Mac", timestamp: ~N[2021-01-01 00:15:00]),
+ build(:pageview, operating_system: "Mac", timestamp: ~N[2021-01-01 00:15:00]),
+ build(:pageview, operating_system: "GNU/Linux", timestamp: ~N[2021-01-01 00:15:00])
+ ])
+
+ assert :ok =
+ Plausible.Imported.from_google_analytics(
+ [
+ %{
+ "dimensions" => ["20210101", "Macintosh"],
+ "metrics" => [%{"values" => ["1", "1", "0", "10"]}]
+ },
+ %{
+ "dimensions" => ["20210101", "Linux"],
+ "metrics" => [%{"values" => ["1", "1", "0", "10"]}]
+ }
+ ],
+ site.id,
+ "imported_operating_systems"
+ )
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/operating-systems?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Mac", "visitors" => 3, "percentage" => 60},
+ %{"name" => "GNU/Linux", "visitors" => 2, "percentage" => 40}
+ ]
+ end
+ end
+end
diff --git a/test/plausible/stats/query_test.exs b/test/plausible/stats/query_test.exs
index 71830ea8b2cb..061989db248f 100644
--- a/test/plausible/stats/query_test.exs
+++ b/test/plausible/stats/query_test.exs
@@ -2,10 +2,10 @@ defmodule Plausible.Stats.QueryTest do
use ExUnit.Case, async: true
alias Plausible.Stats.Query
- @tz "UTC"
+ @site %Plausible.Site{timezone: "UTC"}
test "parses day format" do
- q = Query.from(@tz, %{"period" => "day", "date" => "2019-01-01"})
+ q = Query.from(@site, %{"period" => "day", "date" => "2019-01-01"})
assert q.date_range.first == ~D[2019-01-01]
assert q.date_range.last == ~D[2019-01-01]
@@ -13,7 +13,7 @@ defmodule Plausible.Stats.QueryTest do
end
test "day fromat defaults to today" do
- q = Query.from(@tz, %{"period" => "day"})
+ q = Query.from(@site, %{"period" => "day"})
assert q.date_range.first == Timex.today()
assert q.date_range.last == Timex.today()
@@ -21,7 +21,7 @@ defmodule Plausible.Stats.QueryTest do
end
test "parses realtime format" do
- q = Query.from(@tz, %{"period" => "realtime"})
+ q = Query.from(@site, %{"period" => "realtime"})
assert q.date_range.first == Timex.today()
assert q.date_range.last == Timex.today()
@@ -29,7 +29,7 @@ defmodule Plausible.Stats.QueryTest do
end
test "parses month format" do
- q = Query.from(@tz, %{"period" => "month", "date" => "2019-01-01"})
+ q = Query.from(@site, %{"period" => "month", "date" => "2019-01-01"})
assert q.date_range.first == ~D[2019-01-01]
assert q.date_range.last == ~D[2019-01-31]
@@ -37,7 +37,7 @@ defmodule Plausible.Stats.QueryTest do
end
test "parses 6 month format" do
- q = Query.from(@tz, %{"period" => "6mo"})
+ q = Query.from(@site, %{"period" => "6mo"})
assert q.date_range.first ==
Timex.shift(Timex.today(), months: -5) |> Timex.beginning_of_month()
@@ -47,7 +47,7 @@ defmodule Plausible.Stats.QueryTest do
end
test "parses 12 month format" do
- q = Query.from(@tz, %{"period" => "12mo"})
+ q = Query.from(@site, %{"period" => "12mo"})
assert q.date_range.first ==
Timex.shift(Timex.today(), months: -11) |> Timex.beginning_of_month()
@@ -57,11 +57,11 @@ defmodule Plausible.Stats.QueryTest do
end
test "defaults to 30 days format" do
- assert Query.from(@tz, %{}) == Query.from(@tz, %{"period" => "30d"})
+ assert Query.from(@site, %{}) == Query.from(@site, %{"period" => "30d"})
end
test "parses custom format" do
- q = Query.from(@tz, %{"period" => "custom", "from" => "2019-01-01", "to" => "2019-01-15"})
+ q = Query.from(@site, %{"period" => "custom", "from" => "2019-01-01", "to" => "2019-01-15"})
assert q.date_range.first == ~D[2019-01-01]
assert q.date_range.last == ~D[2019-01-15]
@@ -71,14 +71,14 @@ defmodule Plausible.Stats.QueryTest do
describe "filters" do
test "parses goal filter" do
filters = Jason.encode!(%{"goal" => "Signup"})
- q = Query.from(@tz, %{"period" => "6mo", "filters" => filters})
+ q = Query.from(@site, %{"period" => "6mo", "filters" => filters})
assert q.filters["goal"] == "Signup"
end
test "parses source filter" do
filters = Jason.encode!(%{"source" => "Twitter"})
- q = Query.from(@tz, %{"period" => "6mo", "filters" => filters})
+ q = Query.from(@site, %{"period" => "6mo", "filters" => filters})
assert q.filters["source"] == "Twitter"
end
diff --git a/test/plausible_web/controllers/api/stats_controller/browsers_test.exs b/test/plausible_web/controllers/api/stats_controller/browsers_test.exs
index 5d362277a90d..96fc77441b5d 100644
--- a/test/plausible_web/controllers/api/stats_controller/browsers_test.exs
+++ b/test/plausible_web/controllers/api/stats_controller/browsers_test.exs
@@ -3,7 +3,7 @@ defmodule PlausibleWeb.Api.StatsController.BrowsersTest do
import Plausible.TestUtils
describe "GET /api/stats/:domain/browsers" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns top browsers by unique visitors", %{conn: conn, site: site} do
populate_stats(site, [
@@ -40,6 +40,27 @@ defmodule PlausibleWeb.Api.StatsController.BrowsersTest do
}
]
end
+
+ test "returns top browsers including imported data", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, browser: "Chrome"),
+ build(:imported_browsers, browser: "Chrome"),
+ build(:imported_browsers, browser: "Firefox")
+ ])
+
+ conn = get(conn, "/api/stats/#{site.domain}/browsers?period=day")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Chrome", "visitors" => 1, "percentage" => 100}
+ ]
+
+ conn = get(conn, "/api/stats/#{site.domain}/browsers?period=day&with_imported=true")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Chrome", "visitors" => 2, "percentage" => 67},
+ %{"name" => "Firefox", "visitors" => 1, "percentage" => 33}
+ ]
+ end
end
describe "GET /api/stats/:domain/browser-versions" do
diff --git a/test/plausible_web/controllers/api/stats_controller/countries_test.exs b/test/plausible_web/controllers/api/stats_controller/countries_test.exs
index ecfa4f4a8739..dd0485b59b53 100644
--- a/test/plausible_web/controllers/api/stats_controller/countries_test.exs
+++ b/test/plausible_web/controllers/api/stats_controller/countries_test.exs
@@ -3,7 +3,7 @@ defmodule PlausibleWeb.Api.StatsController.CountriesTest do
import Plausible.TestUtils
describe "GET /api/stats/:domain/countries" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns top countries by new visitors", %{conn: conn, site: site} do
populate_stats(site, [
@@ -15,6 +15,12 @@ defmodule PlausibleWeb.Api.StatsController.CountriesTest do
),
build(:pageview,
country_code: "GB"
+ ),
+ build(:imported_locations,
+ country: "EE"
+ ),
+ build(:imported_locations,
+ country: "GB"
)
])
@@ -38,6 +44,27 @@ defmodule PlausibleWeb.Api.StatsController.CountriesTest do
"percentage" => 33
}
]
+
+ conn = get(conn, "/api/stats/#{site.domain}/countries?period=day&with_imported=true")
+
+ assert json_response(conn, 200) == [
+ %{
+ "code" => "EE",
+ "alpha_3" => "EST",
+ "name" => "Estonia",
+ "flag" => "🇪🇪",
+ "visitors" => 3,
+ "percentage" => 60
+ },
+ %{
+ "code" => "GB",
+ "alpha_3" => "GBR",
+ "name" => "United Kingdom",
+ "flag" => "🇬🇧",
+ "visitors" => 2,
+ "percentage" => 40
+ }
+ ]
end
test "calculates conversion_rate when filtering for goal", %{conn: conn, site: site} do
diff --git a/test/plausible_web/controllers/api/stats_controller/main_graph_test.exs b/test/plausible_web/controllers/api/stats_controller/main_graph_test.exs
index 0707ea921519..0d8fa0216e5a 100644
--- a/test/plausible_web/controllers/api/stats_controller/main_graph_test.exs
+++ b/test/plausible_web/controllers/api/stats_controller/main_graph_test.exs
@@ -4,7 +4,7 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
@user_id 123
describe "GET /api/stats/main-graph - plot" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "displays pageviews for the last 30 minutes in realtime graph", %{conn: conn, site: site} do
populate_stats(site, [
@@ -66,6 +66,75 @@ defmodule PlausibleWeb.Api.StatsController.MainGraphTest do
assert Enum.count(plot) == 31
assert List.first(plot) == 1
assert List.last(plot) == 1
+ assert Enum.sum(plot) == 2
+ end
+
+ test "displays visitors for a month with imported data", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, timestamp: ~N[2021-01-01 00:00:00]),
+ build(:pageview, timestamp: ~N[2021-01-31 00:00:00]),
+ build(:imported_visitors, date: ~D[2021-01-01]),
+ build(:imported_visitors, date: ~D[2021-01-31])
+ ])
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/main-graph?period=month&date=2021-01-01&with_imported=true"
+ )
+
+ assert %{"plot" => plot, "imported_source" => "Google Analytics"} = json_response(conn, 200)
+
+ assert Enum.count(plot) == 31
+ assert List.first(plot) == 2
+ assert List.last(plot) == 2
+ assert Enum.sum(plot) == 4
+ end
+
+ test "displays visitors for a month with imported data and filter", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, timestamp: ~N[2021-01-01 00:00:00], pathname: "/pageA"),
+ build(:pageview, timestamp: ~N[2021-01-31 00:00:00], pathname: "/pageA"),
+ build(:imported_visitors, date: ~D[2021-01-01]),
+ build(:imported_visitors, date: ~D[2021-01-31])
+ ])
+
+ filters = Jason.encode!(%{page: "/pageA"})
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/main-graph?period=month&date=2021-01-01&with_imported=true&filters=#{filters}"
+ )
+
+ assert %{"plot" => plot} = json_response(conn, 200)
+
+ assert Enum.count(plot) == 31
+ assert List.first(plot) == 1
+ assert List.last(plot) == 1
+ assert Enum.sum(plot) == 2
+ end
+
+ test "displays visitors for 6 months with imported data", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, timestamp: ~N[2021-01-01 00:00:00]),
+ build(:pageview, timestamp: ~N[2021-06-30 00:00:00]),
+ build(:imported_visitors, date: ~D[2021-01-01]),
+ build(:imported_visitors, date: ~D[2021-06-30])
+ ])
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/main-graph?period=6mo&date=2021-06-30&with_imported=true"
+ )
+
+ assert %{"plot" => plot} = json_response(conn, 200)
+
+ assert Enum.count(plot) == 6
+ assert List.first(plot) == 2
+ assert List.last(plot) == 2
+ assert Enum.sum(plot) == 4
end
# TODO: missing 6, 12 months, 30 days
diff --git a/test/plausible_web/controllers/api/stats_controller/operating_systems_test.exs b/test/plausible_web/controllers/api/stats_controller/operating_systems_test.exs
index f1f700e55ddf..f0d025ca8bc0 100644
--- a/test/plausible_web/controllers/api/stats_controller/operating_systems_test.exs
+++ b/test/plausible_web/controllers/api/stats_controller/operating_systems_test.exs
@@ -3,7 +3,7 @@ defmodule PlausibleWeb.Api.StatsController.OperatingSystemsTest do
import Plausible.TestUtils
describe "GET /api/stats/:domain/operating_systems" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns operating systems by unique visitors", %{conn: conn, site: site} do
populate_stats(site, [
@@ -41,6 +41,57 @@ defmodule PlausibleWeb.Api.StatsController.OperatingSystemsTest do
}
]
end
+
+ test "returns operating systems by unique visitors with imported data", %{
+ conn: conn,
+ site: site
+ } do
+ populate_stats(site, [
+ build(:pageview, operating_system: "Mac"),
+ build(:pageview, operating_system: "Mac"),
+ build(:pageview, operating_system: "Android"),
+ build(:imported_operating_systems, operating_system: "Mac"),
+ build(:imported_operating_systems, operating_system: "Android")
+ ])
+
+ conn = get(conn, "/api/stats/#{site.domain}/operating-systems?period=day")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Mac", "visitors" => 2, "percentage" => 67},
+ %{"name" => "Android", "visitors" => 1, "percentage" => 33}
+ ]
+
+ conn =
+ get(conn, "/api/stats/#{site.domain}/operating-systems?period=day&with_imported=true")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Mac", "visitors" => 3, "percentage" => 60},
+ %{"name" => "Android", "visitors" => 2, "percentage" => 40}
+ ]
+ end
+
+ test "imported data is ignored when filtering for goal", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, user_id: 1, operating_system: "Mac"),
+ build(:pageview, user_id: 2, operating_system: "Mac"),
+ build(:imported_operating_systems, operating_system: "Mac"),
+ build(:event, user_id: 1, name: "Signup")
+ ])
+
+ filters = Jason.encode!(%{"goal" => "Signup"})
+
+ conn =
+ get(conn, "/api/stats/#{site.domain}/operating-systems?period=day&filters=#{filters}")
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "Mac",
+ "total_visitors" => 2,
+ "visitors" => 1,
+ "conversion_rate" => 50.0
+ }
+ ]
+ end
end
describe "GET /api/stats/:domain/operating-system-versions" do
diff --git a/test/plausible_web/controllers/api/stats_controller/pages_test.exs b/test/plausible_web/controllers/api/stats_controller/pages_test.exs
index e9db51ec9556..9cacb00cee3a 100644
--- a/test/plausible_web/controllers/api/stats_controller/pages_test.exs
+++ b/test/plausible_web/controllers/api/stats_controller/pages_test.exs
@@ -4,7 +4,7 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
@user_id 123
describe "GET /api/stats/:domain/pages" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns top pages by visitors", %{conn: conn, site: site} do
populate_stats(site, [
@@ -25,6 +25,35 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
]
end
+ test "returns top pages by visitors with imported data", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, pathname: "/"),
+ build(:pageview, pathname: "/"),
+ build(:pageview, pathname: "/"),
+ build(:imported_pages, page: "/"),
+ build(:pageview, pathname: "/register"),
+ build(:pageview, pathname: "/register"),
+ build(:imported_pages, page: "/register"),
+ build(:pageview, pathname: "/contact")
+ ])
+
+ conn = get(conn, "/api/stats/#{site.domain}/pages?period=day")
+
+ assert json_response(conn, 200) == [
+ %{"visitors" => 3, "name" => "/"},
+ %{"visitors" => 2, "name" => "/register"},
+ %{"visitors" => 1, "name" => "/contact"}
+ ]
+
+ conn = get(conn, "/api/stats/#{site.domain}/pages?period=day&with_imported=true")
+
+ assert json_response(conn, 200) == [
+ %{"visitors" => 4, "name" => "/"},
+ %{"visitors" => 3, "name" => "/register"},
+ %{"visitors" => 1, "name" => "/contact"}
+ ]
+ end
+
test "calculates bounce rate and time on page for pages", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview,
@@ -67,6 +96,67 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
]
end
+ test "calculates bounce rate and time on page for pages with imported data", %{
+ conn: conn,
+ site: site
+ } do
+ populate_stats(site, [
+ build(:pageview,
+ pathname: "/",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/some-other-page",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:15:00]
+ ),
+ build(:pageview,
+ pathname: "/",
+ timestamp: ~N[2021-01-01 00:15:00]
+ ),
+ build(:imported_pages,
+ page: "/",
+ date: ~D[2021-01-01],
+ time_on_page: 700
+ ),
+ build(:imported_entry_pages,
+ entry_page: "/",
+ date: ~D[2021-01-01],
+ entrances: 3,
+ bounces: 1
+ ),
+ build(:imported_pages,
+ page: "/some-other-page",
+ date: ~D[2021-01-01],
+ time_on_page: 60
+ )
+ ])
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/pages?period=day&date=2021-01-01&detailed=true&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "bounce_rate" => 40.0,
+ "time_on_page" => 800.0,
+ "visitors" => 3,
+ "pageviews" => 3,
+ "name" => "/"
+ },
+ %{
+ "bounce_rate" => nil,
+ "time_on_page" => 60,
+ "visitors" => 2,
+ "pageviews" => 2,
+ "name" => "/some-other-page"
+ }
+ ]
+ end
+
test "returns top pages in realtime report", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, pathname: "/page1"),
@@ -101,7 +191,7 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
end
describe "GET /api/stats/:domain/entry-pages" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns top entry pages by visitors", %{conn: conn, site: site} do
populate_stats(site, [
@@ -151,6 +241,85 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
]
end
+ test "returns top entry pages by visitors with imported data", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview,
+ pathname: "/page1",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/page1",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/page2",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/page2",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:15:00]
+ )
+ ])
+
+ populate_stats(site, [
+ build(:pageview,
+ pathname: "/page2",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 23:15:00]
+ )
+ ])
+
+ populate_stats(site, [
+ build(:imported_entry_pages,
+ entry_page: "/page2",
+ date: ~D[2021-01-01],
+ entrances: 3,
+ visitors: 2,
+ visit_duration: 300
+ )
+ ])
+
+ conn = get(conn, "/api/stats/#{site.domain}/entry-pages?period=day&date=2021-01-01")
+
+ assert json_response(conn, 200) == [
+ %{
+ "unique_entrances" => 2,
+ "total_entrances" => 2,
+ "name" => "/page1",
+ "visit_duration" => 0
+ },
+ %{
+ "unique_entrances" => 1,
+ "total_entrances" => 2,
+ "name" => "/page2",
+ "visit_duration" => 450
+ }
+ ]
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/entry-pages?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "unique_entrances" => 3,
+ "total_entrances" => 5,
+ "name" => "/page2",
+ "visit_duration" => 240.0
+ },
+ %{
+ "unique_entrances" => 2,
+ "total_entrances" => 2,
+ "name" => "/page1",
+ "visit_duration" => 0
+ }
+ ]
+ end
+
test "calculates conversion_rate when filtering for goal", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview,
@@ -215,7 +384,7 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
end
describe "GET /api/stats/:domain/exit-pages" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns top exit pages by visitors", %{conn: conn, site: site} do
populate_stats(site, [
@@ -247,6 +416,67 @@ defmodule PlausibleWeb.Api.StatsController.PagesTest do
]
end
+ test "returns top exit pages by visitors with imported data", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview,
+ pathname: "/page1",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/page1",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/page1",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ pathname: "/page2",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:15:00]
+ )
+ ])
+
+ populate_stats(site, [
+ build(:imported_pages,
+ page: "/page2",
+ date: ~D[2021-01-01],
+ pageviews: 4,
+ visitors: 2
+ ),
+ build(:imported_exit_pages,
+ exit_page: "/page2",
+ date: ~D[2021-01-01],
+ exits: 3,
+ visitors: 2
+ )
+ ])
+
+ conn = get(conn, "/api/stats/#{site.domain}/exit-pages?period=day&date=2021-01-01")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "/page1", "unique_exits" => 2, "total_exits" => 2, "exit_rate" => 66},
+ %{"name" => "/page2", "unique_exits" => 1, "total_exits" => 1, "exit_rate" => 100}
+ ]
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/exit-pages?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "/page2",
+ "unique_exits" => 3,
+ "total_exits" => 4,
+ "exit_rate" => 80.0
+ },
+ %{"name" => "/page1", "unique_exits" => 2, "total_exits" => 2, "exit_rate" => 66}
+ ]
+ end
+
test "calculates correct exit rate and conversion_rate when filtering for goal", %{
conn: conn,
site: site
diff --git a/test/plausible_web/controllers/api/stats_controller/screen_sizes_test.exs b/test/plausible_web/controllers/api/stats_controller/screen_sizes_test.exs
index 56751b96e163..029546cac413 100644
--- a/test/plausible_web/controllers/api/stats_controller/screen_sizes_test.exs
+++ b/test/plausible_web/controllers/api/stats_controller/screen_sizes_test.exs
@@ -3,7 +3,7 @@ defmodule PlausibleWeb.Api.StatsController.ScreenSizesTest do
import Plausible.TestUtils
describe "GET /api/stats/:domain/browsers" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns screen sizes by new visitors", %{conn: conn, site: site} do
populate_stats(site, [
@@ -20,6 +20,34 @@ defmodule PlausibleWeb.Api.StatsController.ScreenSizesTest do
]
end
+ test "returns screen sizes by new visitors with imported data", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, screen_size: "Desktop"),
+ build(:pageview, screen_size: "Desktop"),
+ build(:pageview, screen_size: "Laptop")
+ ])
+
+ populate_stats(site, [
+ build(:imported_devices, device: "Mobile"),
+ build(:imported_devices, device: "Laptop")
+ ])
+
+ conn = get(conn, "/api/stats/#{site.domain}/screen-sizes?period=day")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Desktop", "visitors" => 2, "percentage" => 67},
+ %{"name" => "Laptop", "visitors" => 1, "percentage" => 33}
+ ]
+
+ conn = get(conn, "/api/stats/#{site.domain}/screen-sizes?period=day&with_imported=true")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Desktop", "visitors" => 2, "percentage" => 40},
+ %{"name" => "Laptop", "visitors" => 2, "percentage" => 40},
+ %{"name" => "Mobile", "visitors" => 1, "percentage" => 20}
+ ]
+ end
+
test "calculates conversion_rate when filtering for goal", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview, user_id: 1, screen_size: "Desktop"),
diff --git a/test/plausible_web/controllers/api/stats_controller/sources_test.exs b/test/plausible_web/controllers/api/stats_controller/sources_test.exs
index d16636730751..97f88ff5917c 100644
--- a/test/plausible_web/controllers/api/stats_controller/sources_test.exs
+++ b/test/plausible_web/controllers/api/stats_controller/sources_test.exs
@@ -4,7 +4,7 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
@user_id 123
describe "GET /api/stats/:domain/sources" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns top sources by unique user ids", %{conn: conn, site: site} do
populate_stats(site, [
@@ -30,6 +30,39 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
]
end
+ test "returns top sources with imported data", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview, referrer_source: "Google", referrer: "google.com"),
+ build(:pageview, referrer_source: "Google", referrer: "google.com"),
+ build(:pageview, referrer_source: "DuckDuckGo", referrer: "duckduckgo.com")
+ ])
+
+ populate_stats(site, [
+ build(:imported_sources,
+ source: "Google",
+ visitors: 2
+ ),
+ build(:imported_sources,
+ source: "DuckDuckGo",
+ visitors: 1
+ )
+ ])
+
+ conn = get(conn, "/api/stats/#{site.domain}/sources")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Google", "visitors" => 2},
+ %{"name" => "DuckDuckGo", "visitors" => 1}
+ ]
+
+ conn = get(conn, "/api/stats/#{site.domain}/sources?with_imported=true")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "Google", "visitors" => 4},
+ %{"name" => "DuckDuckGo", "visitors" => 2}
+ ]
+ end
+
test "calculates bounce rate and visit duration for sources", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview,
@@ -73,6 +106,92 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
]
end
+ test "calculates bounce rate and visit duration for sources with imported data", %{
+ conn: conn,
+ site: site
+ } do
+ populate_stats(site, [
+ build(:pageview,
+ referrer_source: "Google",
+ referrer: "google.com",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ referrer_source: "Google",
+ referrer: "google.com",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:15:00]
+ ),
+ build(:pageview,
+ referrer_source: "DuckDuckGo",
+ referrer: "duckduckgo.com",
+ timestamp: ~N[2021-01-01 00:00:00]
+ )
+ ])
+
+ populate_stats(site, [
+ build(:imported_sources,
+ source: "Google",
+ date: ~D[2021-01-01],
+ visitors: 2,
+ visits: 3,
+ bounces: 1,
+ visit_duration: 900
+ ),
+ build(:imported_sources,
+ source: "DuckDuckGo",
+ date: ~D[2021-01-01],
+ visitors: 1,
+ visits: 1,
+ visit_duration: 100,
+ bounces: 0
+ )
+ ])
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/sources?period=day&date=2021-01-01&detailed=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "Google",
+ "visitors" => 1,
+ "bounce_rate" => 0,
+ "visit_duration" => 900
+ },
+ %{
+ "name" => "DuckDuckGo",
+ "visitors" => 1,
+ "bounce_rate" => 100,
+ "visit_duration" => 0
+ }
+ ]
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/sources?period=day&date=2021-01-01&detailed=true&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "Google",
+ "visitors" => 3,
+ "bounce_rate" => 25,
+ "visit_duration" => 450.0
+ },
+ %{
+ "name" => "DuckDuckGo",
+ "visitors" => 2,
+ "bounce_rate" => 50,
+ "visit_duration" => 50
+ }
+ ]
+ end
+
test "returns top sources in realtime report", %{conn: conn, site: site} do
populate_stats(site, [
build(:pageview,
@@ -113,6 +232,9 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
build(:pageview,
referrer_source: "DuckDuckGo",
referrer: "duckduckgo.com"
+ ),
+ build(:imported_sources,
+ source: "DuckDuckGo"
)
])
@@ -121,6 +243,12 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
assert json_response(conn, 200) == [
%{"name" => "DuckDuckGo", "visitors" => 1}
]
+
+ conn = get(conn, "/api/stats/#{site.domain}/sources?limit=1&page=2&with_imported=true")
+
+ assert json_response(conn, 200) == [
+ %{"name" => "DuckDuckGo", "visitors" => 2}
+ ]
end
test "shows sources for a page", %{conn: conn, site: site} do
@@ -141,7 +269,7 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
end
describe "GET /api/stats/:domain/utm_mediums" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns top utm_mediums by unique user ids", %{conn: conn, site: site} do
populate_stats(site, [
@@ -161,6 +289,25 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
)
])
+ populate_stats(site, [
+ build(:imported_sources,
+ utm_medium: "social",
+ date: ~D[2021-01-01],
+ visit_duration: 700,
+ bounces: 1,
+ visits: 1,
+ visitors: 1
+ ),
+ build(:imported_sources,
+ utm_medium: "email",
+ date: ~D[2021-01-01],
+ bounces: 0,
+ visits: 1,
+ visitors: 1,
+ visit_duration: 100
+ )
+ ])
+
conn =
get(
conn,
@@ -181,11 +328,32 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
"visit_duration" => 0
}
]
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_mediums?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "social",
+ "visitors" => 2,
+ "bounce_rate" => 50,
+ "visit_duration" => 800.0
+ },
+ %{
+ "name" => "email",
+ "visitors" => 2,
+ "bounce_rate" => 50,
+ "visit_duration" => 50
+ }
+ ]
end
end
describe "GET /api/stats/:domain/utm_campaigns" do
- setup [:create_user, :log_in, :create_new_site]
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
test "returns top utm_campaigns by unique user ids", %{conn: conn, site: site} do
populate_stats(site, [
@@ -209,6 +377,25 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
)
])
+ populate_stats(site, [
+ build(:imported_sources,
+ utm_campaign: "profile",
+ date: ~D[2021-01-01],
+ visit_duration: 700,
+ bounces: 1,
+ visits: 1,
+ visitors: 1
+ ),
+ build(:imported_sources,
+ utm_campaign: "august",
+ date: ~D[2021-01-01],
+ bounces: 0,
+ visits: 1,
+ visitors: 1,
+ visit_duration: 900
+ )
+ ])
+
conn =
get(
conn,
@@ -229,6 +416,27 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
"visit_duration" => 900
}
]
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_campaigns?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "august",
+ "visitors" => 3,
+ "bounce_rate" => 67,
+ "visit_duration" => 300
+ },
+ %{
+ "name" => "profile",
+ "visitors" => 2,
+ "bounce_rate" => 50,
+ "visit_duration" => 800.0
+ }
+ ]
end
end
@@ -301,6 +509,11 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
)
])
+ # Imported data is ignored when filtering
+ populate_stats(site, [
+ build(:imported_sources, source: "Twitter")
+ ])
+
filters = Jason.encode!(%{goal: "Signup"})
conn =
@@ -548,4 +761,180 @@ defmodule PlausibleWeb.Api.StatsController.SourcesTest do
}
end
end
+
+ describe "GET /api/stats/:domain/utm_terms" do
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
+
+ test "returns top utm_terms by unique user ids", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview,
+ utm_term: "oat milk",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ utm_term: "oat milk",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:15:00]
+ ),
+ build(:pageview,
+ utm_term: "Sweden",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ utm_term: "Sweden",
+ timestamp: ~N[2021-01-01 00:00:00]
+ )
+ ])
+
+ populate_stats(site, [
+ build(:imported_sources,
+ utm_term: "oat milk",
+ date: ~D[2021-01-01],
+ visit_duration: 700,
+ bounces: 1,
+ visits: 1,
+ visitors: 1
+ ),
+ build(:imported_sources,
+ utm_term: "Sweden",
+ date: ~D[2021-01-01],
+ bounces: 0,
+ visits: 1,
+ visitors: 1,
+ visit_duration: 900
+ )
+ ])
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_terms?period=day&date=2021-01-01"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "Sweden",
+ "visitors" => 2,
+ "bounce_rate" => 100,
+ "visit_duration" => 0
+ },
+ %{
+ "name" => "oat milk",
+ "visitors" => 1,
+ "bounce_rate" => 0,
+ "visit_duration" => 900
+ }
+ ]
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_terms?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "Sweden",
+ "visitors" => 3,
+ "bounce_rate" => 67,
+ "visit_duration" => 300
+ },
+ %{
+ "name" => "oat milk",
+ "visitors" => 2,
+ "bounce_rate" => 50,
+ "visit_duration" => 800.0
+ }
+ ]
+ end
+ end
+
+ describe "GET /api/stats/:domain/utm_contents" do
+ setup [:create_user, :log_in, :create_new_site, :add_imported_data]
+
+ test "returns top utm_contents by unique user ids", %{conn: conn, site: site} do
+ populate_stats(site, [
+ build(:pageview,
+ utm_content: "ad",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ utm_content: "ad",
+ user_id: @user_id,
+ timestamp: ~N[2021-01-01 00:15:00]
+ ),
+ build(:pageview,
+ utm_content: "blog",
+ timestamp: ~N[2021-01-01 00:00:00]
+ ),
+ build(:pageview,
+ utm_content: "blog",
+ timestamp: ~N[2021-01-01 00:00:00]
+ )
+ ])
+
+ populate_stats(site, [
+ build(:imported_sources,
+ utm_content: "ad",
+ date: ~D[2021-01-01],
+ visit_duration: 700,
+ bounces: 1,
+ visits: 1,
+ visitors: 1
+ ),
+ build(:imported_sources,
+ utm_content: "blog",
+ date: ~D[2021-01-01],
+ bounces: 0,
+ visits: 1,
+ visitors: 1,
+ visit_duration: 900
+ )
+ ])
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_contents?period=day&date=2021-01-01"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "blog",
+ "visitors" => 2,
+ "bounce_rate" => 100,
+ "visit_duration" => 0
+ },
+ %{
+ "name" => "ad",
+ "visitors" => 1,
+ "bounce_rate" => 0,
+ "visit_duration" => 900
+ }
+ ]
+
+ conn =
+ get(
+ conn,
+ "/api/stats/#{site.domain}/utm_contents?period=day&date=2021-01-01&with_imported=true"
+ )
+
+ assert json_response(conn, 200) == [
+ %{
+ "name" => "blog",
+ "visitors" => 3,
+ "bounce_rate" => 67,
+ "visit_duration" => 300
+ },
+ %{
+ "name" => "ad",
+ "visitors" => 2,
+ "bounce_rate" => 50,
+ "visit_duration" => 800.0
+ }
+ ]
+ end
+ end
end
diff --git a/test/plausible_web/controllers/site_controller_test.exs b/test/plausible_web/controllers/site_controller_test.exs
index 6ccf7d62f107..3d25b8398d68 100644
--- a/test/plausible_web/controllers/site_controller_test.exs
+++ b/test/plausible_web/controllers/site_controller_test.exs
@@ -2,6 +2,7 @@ defmodule PlausibleWeb.SiteControllerTest do
use PlausibleWeb.ConnCase
use Plausible.Repo
use Bamboo.Test
+ use Oban.Testing, repo: Plausible.Repo
import Plausible.TestUtils
describe "GET /sites/new" do
@@ -374,7 +375,7 @@ defmodule PlausibleWeb.SiteControllerTest do
test "deletes associated google auth", %{conn: conn, user: user, site: site} do
insert(:google_auth, user: user, site: site)
- conn = delete(conn, "/#{site.domain}/settings/google")
+ conn = delete(conn, "/#{site.domain}/settings/google-search")
refute Repo.exists?(Plausible.Site.GoogleAuth)
assert redirected_to(conn, 302) == "/#{site.domain}/settings/search-console"
@@ -718,4 +719,38 @@ defmodule PlausibleWeb.SiteControllerTest do
assert Repo.aggregate(Plausible.Site.CustomDomain, :count, :id) == 0
end
end
+
+ describe "POST /:website/settings/google-import" do
+ setup [:create_user, :log_in, :create_new_site]
+
+ test "adds in-progress imported tag to site", %{conn: conn, site: site} do
+ post(conn, "/#{site.domain}/settings/google-import", %{"profile" => "123"})
+
+ imported_data = Repo.reload(site).imported_data
+
+ assert imported_data
+ assert imported_data.source == "Google Analytics"
+ assert imported_data.end_date == Timex.today()
+ assert imported_data.status == "importing"
+ end
+
+ test "schedules an import job in Oban", %{conn: conn, site: site} do
+ post(conn, "/#{site.domain}/settings/google-import", %{"profile" => "123"})
+
+ assert_enqueued(
+ worker: Plausible.Workers.ImportGoogleAnalytics,
+ args: %{"site_id" => site.id, "profile" => "123"}
+ )
+ end
+ end
+
+ describe "DELETE /:website/settings/forget-imported" do
+ setup [:create_user, :log_in, :create_new_site]
+
+ test "removes imported_data field from site", %{conn: conn, site: site} do
+ delete(conn, "/#{site.domain}/settings/forget-imported")
+
+ assert Repo.reload(site).imported_data == nil
+ end
+ end
end
diff --git a/test/support/factory.ex b/test/support/factory.ex
index 754668f83325..30ca268069f1 100644
--- a/test/support/factory.ex
+++ b/test/support/factory.ex
@@ -180,6 +180,114 @@ defmodule Plausible.Factory do
}
end
+ def imported_visitors_factory do
+ %{
+ table: "imported_visitors",
+ date: Timex.today(),
+ visitors: 1,
+ pageviews: 1,
+ bounces: 0,
+ visits: 1,
+ visit_duration: 10
+ }
+ end
+
+ def imported_sources_factory do
+ %{
+ table: "imported_sources",
+ date: Timex.today(),
+ source: "",
+ visitors: 1,
+ visits: 1,
+ bounces: 0,
+ visit_duration: 10
+ }
+ end
+
+ def imported_pages_factory do
+ %{
+ table: "imported_pages",
+ date: Timex.today(),
+ page: "",
+ visitors: 1,
+ pageviews: 1,
+ exits: 0,
+ time_on_page: 10
+ }
+ end
+
+ def imported_entry_pages_factory do
+ %{
+ table: "imported_entry_pages",
+ date: Timex.today(),
+ entry_page: "",
+ visitors: 1,
+ entrances: 1,
+ bounces: 0,
+ visit_duration: 10
+ }
+ end
+
+ def imported_exit_pages_factory do
+ %{
+ table: "imported_exit_pages",
+ date: Timex.today(),
+ exit_page: "",
+ visitors: 1,
+ exits: 1
+ }
+ end
+
+ def imported_locations_factory do
+ %{
+ table: "imported_locations",
+ date: Timex.today(),
+ country: "",
+ region: "",
+ city: 0,
+ visitors: 1,
+ visits: 1,
+ bounces: 0,
+ visit_duration: 10
+ }
+ end
+
+ def imported_devices_factory do
+ %{
+ table: "imported_devices",
+ date: Timex.today(),
+ device: "",
+ visitors: 1,
+ visits: 1,
+ bounces: 0,
+ visit_duration: 10
+ }
+ end
+
+ def imported_browsers_factory do
+ %{
+ table: "imported_browsers",
+ date: Timex.today(),
+ browser: "",
+ visitors: 1,
+ visits: 1,
+ bounces: 0,
+ visit_duration: 10
+ }
+ end
+
+ def imported_operating_systems_factory do
+ %{
+ table: "imported_operating_systems",
+ date: Timex.today(),
+ operating_system: "",
+ visitors: 1,
+ visits: 1,
+ bounces: 0,
+ visit_duration: 10
+ }
+ end
+
defp hash_key() do
Keyword.fetch!(
Application.get_env(:plausible, PlausibleWeb.Endpoint),
diff --git a/test/support/test_utils.ex b/test/support/test_utils.ex
index 616846e7dd0e..e0fdeb094664 100644
--- a/test/support/test_utils.ex
+++ b/test/support/test_utils.ex
@@ -11,6 +11,15 @@ defmodule Plausible.TestUtils do
{:ok, site: site}
end
+ def add_imported_data(%{site: site}) do
+ site =
+ site
+ |> Plausible.Site.start_import("Google Analytics", "ok")
+ |> Repo.update!()
+
+ {:ok, site: site}
+ end
+
def create_new_site(%{user: user}) do
site = Factory.insert(:site, members: [user])
{:ok, site: site}
@@ -81,12 +90,34 @@ defmodule Plausible.TestUtils do
def populate_stats(site, events) do
Enum.map(events, fn event ->
- Map.put(event, :domain, site.domain)
+ case event do
+ %Plausible.ClickhouseEvent{} ->
+ Map.put(event, :domain, site.domain)
+
+ _ ->
+ Map.put(event, :site_id, site.id)
+ end
end)
|> populate_stats
end
def populate_stats(events) do
+ {native, imported} =
+ Enum.split_with(events, fn event ->
+ case event do
+ %Plausible.ClickhouseEvent{} ->
+ true
+
+ _ ->
+ false
+ end
+ end)
+
+ if native, do: populate_native_stats(native)
+ if imported, do: populate_imported_stats(imported)
+ end
+
+ defp populate_native_stats(events) do
sessions =
Enum.reduce(events, %{}, fn event, sessions ->
Plausible.Session.Store.reconcile_event(sessions, event)
@@ -108,6 +139,11 @@ defmodule Plausible.TestUtils do
)
end
+ defp populate_imported_stats(events) do
+ Enum.group_by(events, &Map.fetch!(&1, :table), &Map.delete(&1, :table))
+ |> Enum.map(fn {table, events} -> Plausible.ClickhouseRepo.insert_all(table, events) end)
+ end
+
def relative_time(shifts) do
NaiveDateTime.utc_now()
|> Timex.shift(shifts)
diff --git a/test/workers/import_google_analytics_test.exs b/test/workers/import_google_analytics_test.exs
new file mode 100644
index 000000000000..d126144b9920
--- /dev/null
+++ b/test/workers/import_google_analytics_test.exs
@@ -0,0 +1,86 @@
+defmodule Plausible.Workers.ImportGoogleAnalyticsTest do
+ use Plausible.DataCase
+ use Bamboo.Test
+ import Double
+ alias Plausible.Workers.ImportGoogleAnalytics
+
+ @imported_data %Plausible.Site.ImportedData{
+ end_date: Timex.today(),
+ source: "Google Analytics",
+ status: "importing"
+ }
+
+ test "updates the imported_data field for the site after successful import" do
+ user = insert(:user, trial_expiry_date: Timex.today() |> Timex.shift(days: 1))
+ site = insert(:site, members: [user], imported_data: @imported_data)
+
+ api_stub =
+ stub(Plausible.Google.Api, :import_analytics, fn _site, _profile ->
+ {:ok, nil}
+ end)
+
+ ImportGoogleAnalytics.perform(
+ %Oban.Job{args: %{"site_id" => site.id, "profile" => "profile"}},
+ api_stub
+ )
+
+ assert Repo.reload!(site).imported_data.status == "ok"
+ end
+
+ test "sends email to owner after successful import" do
+ user = insert(:user, trial_expiry_date: Timex.today() |> Timex.shift(days: 1))
+ site = insert(:site, members: [user], imported_data: @imported_data)
+
+ api_stub =
+ stub(Plausible.Google.Api, :import_analytics, fn _site, _profile ->
+ {:ok, nil}
+ end)
+
+ ImportGoogleAnalytics.perform(
+ %Oban.Job{args: %{"site_id" => site.id, "profile" => "profile"}},
+ api_stub
+ )
+
+ assert_email_delivered_with(
+ to: [user],
+ subject: "Google Analytics data imported for #{site.domain}"
+ )
+ end
+
+ test "updates site record after failed import" do
+ user = insert(:user, trial_expiry_date: Timex.today() |> Timex.shift(days: 1))
+ site = insert(:site, members: [user], imported_data: @imported_data)
+
+ api_stub =
+ stub(Plausible.Google.Api, :import_analytics, fn _site, _profile ->
+ {:error, "Something went wrong"}
+ end)
+
+ ImportGoogleAnalytics.perform(
+ %Oban.Job{args: %{"site_id" => site.id, "profile" => "profile"}},
+ api_stub
+ )
+
+ assert Repo.reload!(site).imported_data.status == "error"
+ end
+
+ test "sends email to owner after failed import" do
+ user = insert(:user, trial_expiry_date: Timex.today() |> Timex.shift(days: 1))
+ site = insert(:site, members: [user], imported_data: @imported_data)
+
+ api_stub =
+ stub(Plausible.Google.Api, :import_analytics, fn _site, _profile ->
+ {:error, "Something went wrong"}
+ end)
+
+ ImportGoogleAnalytics.perform(
+ %Oban.Job{args: %{"site_id" => site.id, "profile" => "profile"}},
+ api_stub
+ )
+
+ assert_email_delivered_with(
+ to: [user],
+ subject: "Google Analytics import failed for #{site.domain}"
+ )
+ end
+end