<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	>

<channel>
	<title>UgoTrade &#187; Ecological Intelligence</title>
	<atom:link href="http://www.ugotrade.com/category/smart-planet/ecological-intelligence-smart-planet/feed/" rel="self" type="application/rss+xml" />
	<link>http://www.ugotrade.com</link>
	<description>Augmented Realities at the Edge of the Network</description>
	<lastBuildDate>Wed, 25 May 2016 15:59:56 +0000</lastBuildDate>
	<language>en-US</language>
		<sy:updatePeriod>hourly</sy:updatePeriod>
		<sy:updateFrequency>1</sy:updateFrequency>
	<generator>https://wordpress.org/?v=3.9.40</generator>
	<item>
		<title>Platforms for Growth and Points of Control for Augmented Reality: Talking with Chris Arkenberg</title>
		<link>http://www.ugotrade.com/2010/10/27/platforms-for-growth-and-points-of-control-for-augmented-reality-talking-with-chris-arkenberg/</link>
		<comments>http://www.ugotrade.com/2010/10/27/platforms-for-growth-and-points-of-control-for-augmented-reality-talking-with-chris-arkenberg/#comments</comments>
		<pubDate>Wed, 27 Oct 2010 09:14:49 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Android]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[privacy and online identity]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[AR and html 5]]></category>
		<category><![CDATA[AR eyewear]]></category>
		<category><![CDATA[AR eyewear for smart phones]]></category>
		<category><![CDATA[ardevcamp]]></category>
		<category><![CDATA[arduino]]></category>
		<category><![CDATA[ARWave]]></category>
		<category><![CDATA[augmented foraging]]></category>
		<category><![CDATA[augmented reality event]]></category>
		<category><![CDATA[augmented reality eyewear]]></category>
		<category><![CDATA[augmented reality on tablets]]></category>
		<category><![CDATA[augmented reality search]]></category>
		<category><![CDATA[cloud computing and AR]]></category>
		<category><![CDATA[EarthMine]]></category>
		<category><![CDATA[gartner hype cycle]]></category>
		<category><![CDATA[Gary Hayes]]></category>
		<category><![CDATA[John Battelle]]></category>
		<category><![CDATA[Kevin Slavin]]></category>
		<category><![CDATA[Layar]]></category>
		<category><![CDATA[location based services]]></category>
		<category><![CDATA[Metaio]]></category>
		<category><![CDATA[Mobile AR]]></category>
		<category><![CDATA[mobile social augmented reality]]></category>
		<category><![CDATA[MUVEdesign]]></category>
		<category><![CDATA[NVidia augmented reality demo]]></category>
		<category><![CDATA[Ogmento]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[Platforms for Growth]]></category>
		<category><![CDATA[Points of Control Map]]></category>
		<category><![CDATA[Porthole]]></category>
		<category><![CDATA[QR codes]]></category>
		<category><![CDATA[Qualcomm SDK for AR]]></category>
		<category><![CDATA[real time analytics and AR]]></category>
		<category><![CDATA[RFID]]></category>
		<category><![CDATA[Simple Geo]]></category>
		<category><![CDATA[The Battle for the Internet Economy]]></category>
		<category><![CDATA[Tim O'Reilly]]></category>
		<category><![CDATA[Total Immersion]]></category>
		<category><![CDATA[transmedia story telling]]></category>
		<category><![CDATA[trasmedia]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[Ushahidi]]></category>
		<category><![CDATA[Usman Haque]]></category>
		<category><![CDATA[vision based AR]]></category>
		<category><![CDATA[W3C group on augmented reality]]></category>
		<category><![CDATA[Wave in a Box]]></category>
		<category><![CDATA[Web 2.0 Expo]]></category>
		<category><![CDATA[web standards based browser for AR]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=5924</guid>
		<description><![CDATA[The Points of Control map is interactive, so please click here or on the image above for the full experience. Today at 4pm EST, 1pm PDT John Battelle and Tim O&#8217;Reilly will discuss the Points of Control map and The Battle for the Internet Economy in a Free Webcast: &#8220;More than any time in the [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://map.web2summit.com/"><img class="alignnone size-medium wp-image-5931" title="Screen shot 2010-10-27 at 1.56.15 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-1.56.15-AM-300x181.png" alt="Screen shot 2010-10-27 at 1.56.15 AM" width="300" height="181" /></a></p>
<p><em>The Points of Control map is interactive, so please <a href="http://map.web2summit.com/" target="_blank">click here </a>or on the image above for the full experience.</em></p>
<p><em> </em>Today at 4pm EST, 1pm PDT John Battelle and Tim O&#8217;Reilly will discuss the <a href="http://map.web2summit.com/" target="_blank">Points of Control</a> map and The Battle for the Internet Economy <a href="http://oreilly.com/emails/poc_web2summit-webcast-prg.html" target="_blank">in a Free Webcast</a>:</p>
<p><strong>&#8220;More than any time in the history of the Web, incumbents in the network  economy are consolidating their power and staking new claims to key  points of control. It&#8217;s clear that the internet industry has moved into a  battle to dominate the Internet Economy.</strong></p>
<p><strong>John Battelle and Tim O&#8217;Reilly will debate and discuss these shifting  points of control as the board becomes increasingly crowded. They&#8217;ll map  critical inflection points and identify key players who are clashing to  control services and infrastructure as they attempt to expand their  territories. They&#8217;ll also explore the effect these chokepoints could  have on people, government, and the future of technology innovation.&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.01.38-AM.png"><img class="alignnone size-medium wp-image-5932" title="Screen shot 2010-10-27 at 2.01.38 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.01.38-AM-300x124.png" alt="Screen shot 2010-10-27 at 2.01.38 AM" width="300" height="124" /></a></p>
<p><em> </em>I&#8217;ve been wanting to start a discussion on the <a href="http://map.web2summit.com/">Points of Control map </a>in the Augmented Reality community for a while now, and Chris&#8217; recent post on <a href="http://www.gartner.com/it/page.jsp?id=1447613" target="_blank">the latest edition of the Gartner Hype Cycle</a>, <a href="http://www.urbeingrecorded.com/news/2010/10/13/is-ar-ready-for-the-trough-of-disillusionment/" target="_blank">&#8220;Is AR Ready for the Trough of Disillusionment?&#8221; </a>and this post by Mac  Slocum, <a href="http://radar.oreilly.com/2010/10/two-ways-augmented-reality-app.html" target="_blank">&#8220;How Augmented Reality Apps Can Catch On,&#8221;</a> and the conversation in the comments between Mac, Raimo (one of the founders of <a href="http://www.layar.com/" target="_blank">Layar)</a>, and Chris, all prompted me to get a conversation started&#8230;(see below for all that followed!). Chris put me on the hot seat back in June when he did <a href="http://www.boingboing.net/2010/06/17/tish-shute---augment.html" target="_blank">this very generous interview with me on Boing Boing</a>, so it was time to turn the tables.</p>
<p>Tim O&#8217;Reilly, in his <a href="http://www.youtube.com/watch?v=3637xFBvkYg&amp;p=6F97A6F4BA797FB3" target="_blank"> keynote for Web 2.0 Expo,</a> pointed out there is both a fun and a dark side to the Points of Control map. There are companies on this map, he noted, that rather than &#8220;growing the pie,&#8221; are  trying to divide up the pie, and they are forgetting to think about  creating a sustainable ecosystem. I expect the conversation between Tim O&#8217;Reilly and John Battelle to dig deep into this Battle for the Internet Economy. If, like me, you have another engagement at the time of the webcast, you can register on the site to receive the recording.</p>
<p>AR is still too young to figure in the battles of the giants, but there will be a lot to be learned from this conversation. And, The Points of Control map is good to think with from the POV of AR in many ways. As Chris Arkenberg observed:</p>
<p><strong>&#8220;When I look at this map, the points of control map, it&#8217;s  really interesting to me, because what it says to me with respect to AR  is each of these little regions that they have drawn out would be a  great research project. So every single one of these should be  instructive to AR.</strong></p>
<p><strong>In other words, we should be able to look at social networks,  the land of search, or kingdom of ecommerce, and apply some very  rigorous critical thinking to say, &#8220;How would AR add to this engagement,  this experience of gaming, or ecommerce, or content?&#8221;</strong></p>
<p><strong>Looking at each of these individually and really meticulously  saying, &#8220;OK, well yes, it can do this but how is that different from  the current screen media experience, the current web experience that we  have of all these types of things?&#8221; You know, how can augmented  reality really add a new layer of value and experience to these? And I  think that process would really trim a lot of the fat from the hopes and  dreams of AR and anchor it down into some very pragmatic avenues for  development. And then you could start looking at, &#8220;Well, OK, what  happens when we start combining these?&#8221; When we take gaming levels and  plug that into the location basin, as you suggested.&#8221;</strong></p>
<p>Chris Arkenberg is a technology professional with a focus on product strategy &amp; development, specializing in 3D, augmented reality, ubicomp and the social web. He uses research, scenario planning, and foresight methodologies to help organizations anticipate change and adopt a resilient and forward-looking posture in the face of unprecedented uncertainty. His personal work is collected at <a href="http://urbeingrecorded.com " target="_blank">urbeingrecorded</a>, and his <a href="http://www.linkedin.com/in/chrisarkenberg" target="_blank">professional profile is here.</a></p>
<p>He is also one of the founder/organizers of <a href="http://ardevcamp.org" target="_blank">AR DevCamp</a> which is currently scheduled for Dec. 4th (somewhere in SF or The Valley!) Chris said, &#8220;No further details atm (still trying to find a venue and get sponsors) but please direct people to http://ardevcamp.org for upcoming information.&#8221;</p>
<h3>Talking with Chris Arkenberg</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/ChrisArkenberg.jpg"><img class="alignnone size-medium wp-image-5929" title="ChrisArkenberg" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/ChrisArkenberg-300x199.jpg" alt="ChrisArkenberg" width="300" height="199" /></a></p>
<p><strong>Tish Shute:</strong> I know some people thought <a href="http://www.gartner.com/it/page.jsp?id=1447613" target="_blank">the positioning of AR by Gartner near the peak of the hype cycle </a>was misguided, and based on a very narrow understanding of AR as used in marketing apps. But reading your post I thought you made a lot of good points.</p>
<p><strong>Chris Arkenberg:  It&#8217;s tracking hype, right?  It&#8217;s not necessarily tracking the growth of the technologies or their maturation so much as it&#8217;s tracking the general attention level.  And what&#8217;s interesting to me is that tends to affect the amount of money that goes into those technologies.</strong></p>
<p><strong>Tish Shute:</strong> I was particularly interested in your post because I have been writing a post about two recent O&#8217;Reilly events in NYC, <a href="http://makerfaire.com/newyork/2010/" target="_blank">Maker Faire</a>, <a href="http://www.web2expo.com/">Web 2.0 Expo</a>, and then <a href="http://www.cloudera.com/company/press-center/hadoop-world-nyc/" target="_blank">Hadoop World</a>, where Tim gave a very interesting 45 minute keynote. AR was pretty low profile at all three events. <a href="http://www.flickr.com/photos/bdave2007/5036397168/in/photostream/" target="_blank"> But the NVidia augmented reality demo attracted a lot of attention at the sponsors expo, </a> and Usman Haque, Founder of <a href="http://www.pachube.com/" target="_blank">Pachube</a> announced in<a href="http://www.web2expo.com/webexny2010/public/schedule/speaker/43845" target="_blank"> his presentation</a>,  they are working on an augmented reality interface for Pachube called Porthole, it&#8217;s designed for  facilities management and, &#8220;as a consumer-oriented application that  extends the universe of Pachube data into the context of AR &#8211; a  &#8216;porthole&#8217; into Pachube&#8217;s data environments.&#8221; Usman also mentioned, when I talked to him, that he is contributing to the AR standards discussion and is on the program committee now <a href="http://www.w3.org/2010/06/16-w3car-minutes.html#item02" target="_blank">for the W3C group on augmented reality</a>. For more on this standards discussion and the Pachube AR interface, see Chris Burman&#8217;s paper for the W3C, <a href="http://www.w3.org/2010/06/w3car/portholes_and_plumbing.pdf" target="_blank">Portholes and Plumbing: how AR erases boundaries between &#8220;physical&#8221; and &#8220;virtual.&#8221;</a></p>
<p>I think pioneers in the augmented reality community should pay attention to these wider conversations about the Battle for the Internet Economy, and the exploration of the &#8220;Platforms for Growth&#8221; theme at <a href="http://www.web2expo.com/">Web 2.0 Expo</a> is very important &#8211; this is of course also a nudge to read my upcoming post on these O&#8217;Reilly events!</p>
<p>Also I have another project I have been chewing on that I would like to talk to you about. I want to start an AR conversation about the wonderful <a href="http://map.web2summit.com/">Points of Control map</a> produced for Web 2.0 summit by <a href="http://battellemedia.com/" target="_blank">John Battelle</a>. [ Note there will be, "Battle for the Internet Economy" free Web2Summit webcast w/ @johnbattelle &amp; @timoreilly Wed 10/27 at 1pm PT http://bit.ly/b46cmb #w2s]</p>
<p>Up to this point, understandably given the immaturity of the technology, AR has little role in the &#8220;Battle for the Internet  Economy.&#8221; But this doesn&#8217;t mean that the map isn&#8217;t good for AR visionaries, enthusiasts, entrepreneurs, and developers to think with. And both you and Tim have pointed out the potential for AR to leverage the giant data subsystems in the sky. I have to say the positioning of Cloud Computing on the brink of heading down into the trough of disillusionment in this recent rendition of the Gartner Hype Cycle seems ridiculous!</p>
<p>Cloud Computing is already ubiquitous &#8211; it hardly seems credible that it is headed for a trough of disillusionment!</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.48.30-AM.png"><img class="alignnone size-medium wp-image-5940" title="Screen shot 2010-10-27 at 2.48.30 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.48.30-AM-300x199.png" alt="Screen shot 2010-10-27 at 2.48.30 AM" width="300" height="199" /></a></p>
<p><strong>Chris Arkenberg:  Yeah, it&#8217;s ubiquitous so why even talk about it when it&#8217;s your fundamental infrastructure?</strong></p>
<p><strong>Tish Shute:</strong> Yeah and I seriously doubt it is  imminently headed for a  trough of disillusionment&#8230;and this brings me back to the Points of Control Map which as John Battelle points out,  &#8220;aims to  identify key players who are battling to control the services and infrastructure of a websquared world&#8221; in which the &#8220;Web and the world intertwine through mobile and sensor platforms.&#8221; This instrumented world, of course, creates a great deal of opportunity for augmented reality.  Have you seen that, that points of control map?</p>
<p><strong>Chris Arkenberg:  I think I have, actually.</strong></p>
<p><strong>Tish Shute: </strong> There has been much debate about how this intertwining of the web and  the world will play out in augmented reality. Chris Burman points out in his position paper for W3C, <a href="http://www.w3.org/2010/06/w3car/portholes_and_plumbing.pdf" target="_blank">Portholes and Plumbing: how AR erases boundaries between &#8220;physical&#8221; and &#8220;virtual&#8221;</a>, that &#8220;trying to draw parallels between a browser based web and the possibilities of AR may solve issues of information distribution in the short-term,&#8221; but it must not have a limiting effect in the long-term. But now we at least have one <a href="https://research.cc.gatech.edu/polaris/" target="_blank">web standards-based browser for AR</a> thanks to the work of Blair MacIntyre and the Georgia Tech team. But  I think the discussion in the comments of Mac Slocum&#8217;s recent post, <a href="http://radar.oreilly.com/2010/10/two-ways-augmented-reality-app.html" target="_blank">&#8220;How Augmented Reality Apps Can Catch On&#8221;</a> is an interesting starting point from which to think about platforms of growth for AR. I am not sure if I am stretching his meaning but I think Raimo, <a href="http://www.layar.com/" target="_blank">Layar</a>, is suggesting that what the Points of Control map calls the Plains of Media content is very important to the growth of the fledgling AR industry right now.   And I would agree with this, and add that the neighboring terrain of gaming levels will be pretty key as one of my other favorite AR start ups <a href="http://ogmento.com/" target="_blank">Ogmento</a> hopes to reveal in the near future!  But what do you think was most important in this brief but pithy dialogue between you, Raimo and Mac?</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.56.02-AM.png"><img class="alignnone size-medium wp-image-5941" title="Screen shot 2010-10-27 at 2.56.02 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2010/10/Screen-shot-2010-10-27-at-2.56.02-AM-300x179.png" alt="Screen shot 2010-10-27 at 2.56.02 AM" width="300" height="179" /></a></p>
<p>[The screenshot above is a teaser video from <a title="Gary Hayes" href="http://www.personalizemedia.com/future-of-location-based-augmented-reality-story-games/?utm_source=feedburner&amp;utm_medium=twitter&amp;utm_campaign=Feed:+PersonalizeMedia+%28PERSONALIZE+MEDIA%29" target="_blank">Gary Hayes</a> of <a title="MuveDesign" href="http://www.muvedesign.com/">MUVEdesign</a> for his upcoming (2011 release date) game called Time Treasure. See Gary&#8217;s <a title="Gary Hayes" href="http://www.personalizemedia.com/future-of-location-based-augmented-reality-story-games/?utm_source=feedburner&amp;utm_medium=twitter&amp;utm_campaign=Feed:+PersonalizeMedia+%28PERSONALIZE+MEDIA%29" target="_blank">blog</a> for more and Gary&#8217;s <a href="http://www.personalizemedia.com/16-top-augmented-reality-business-models/" target="_blank"> post from over a year ago</a> on AR Business models. Thomas K. Carpenter, <a href="http://gamesalfresco.com/2010/10/25/time-treasure-future-tablet-game/" target="_blank">on Games Alfresco notes</a>, &#8220;I think this is a terrific idea and I find it interesting he&#8217;s planning this on a tablet rather than a smartphone.&#8221;</p>
<p><strong>Chris Arkenberg:  The way I took it&#8230; And to give a little bit of context, I came from sort of this apprehension of augmented reality as an expression of the existing Internet.  So as sort of a visualization layer that allows you to kind of draw out data, and then, with all the affordances of being able to anchor it to real world things.</strong></p>
<p><strong>And my own sort of path has led me to want to really try to understand that and refine it, particularly with respect to the sort of Internet of things and the smarter planet idea of just having embedded systems everywhere.  And specifically, what is the value-add  for augmented reality as a visualization layer of an instrumented world?</strong></p>
<p><strong>And so that&#8217;s caused me to be a bit biased towards that side of AR.  And the way I took Raimo&#8217;s comment was that he was saying that, &#8220;You know, really what we&#8217;re interested in is media.&#8221;  That he was effectively saying that AR for them is really just about that space between the screen and the world, or between your eyes and the world, and what you can do there.</strong></p>
<p><strong>Certainly I had considered it in the past, but I hadn&#8217;t really focused on it or assumed that it was a priority as a business model.  And so he kind of reminded me that, actually, there&#8217;s a lot of entertainment applications.  There&#8217;s a lot of, obviously, advertising and marketing applications.<br />
And so I felt that I was being a little narrow in my focus&#8230;</strong></p>
<p><strong>Tish Shute: </strong> Yes this comes to the heart of what I am interested in about the role AR can play in opening up new relationships to the world of data that we live in, not just making it more accessible and useful to us when and where we need it, but AR as a road to reimagining it&#8230;</p>
<p>Have you seen any interesting work yet to explore these great data economies in the cloud through AR.  I mean can you think of any others &#8211; there is <em><em><a href="http://www.planefinder.net/" target="_blank">planefinder.net</a> </em></em> but others?</p>
<p><strong>Chris Arkenberg:  I&#8217;ve seen a few just sort of skunk works type applications that people have been playing around with, again, to try and reveal things.  One of them was similar to the aircraft, but it was more for military use and being able to identify things of interest in the sky.  I&#8217;ve seen a couple others for navigation, so being able to identify mountain peaks on a visual plane, for example, but this isn&#8217;t so much about revealing an instrumented world.</strong></p>
<p><strong>Tish Shute:</strong> Yeah, I think that was from the Imagination right?  I know that&#8217;s an interesting one. Usman at Web 2.0 Expo, <a href="http://www.web2expo.com/webexny2010/public/schedule/speaker/43845" target="_blank">in his presentation,</a> mentioned the work Pachube is doing on an Augmented Reality interface.  I interviewed Usman again as my last long interview with him was nearly 18 months ago now and Pachube is well on the way to becoming the Facebook of Data or the analogy that Usman prefers &#8211; the Twitter of sensors!</p>
<p><strong>Chris Arkenberg:  Hmm, interesting.</strong></p>
<p><strong>Tish Shute:</strong> And to go back to your comments on Augmented Reality not getting caught in some of the traps that have made virtual worlds lose relevancy I think it is vital that AR developers understand the strategic possibilities of key points of control in the internet economy because the isolation and Balkanization of virtual worlds were certainly a factor in their rapid slide into the trough of disillusionment &#8211; although many would argue that a fundamental flaw in the kind of virtual experience that Second Life and other virtual worlds constructed was really the fatal flaw (see James Turner&#8217;s interview with Kevin Slavin <a href="http://radar.oreilly.com/2010/09/drawing-the-line-between-games.html" target="_self">Reality has a gaming layer</a>).</p>
<p>But Second Life&#8217;s isolation from the other great network economies of the internet was certainly a limiting factor.</p>
<p><strong>Chris Arkenberg:  And that&#8217;s been exactly my sense, and I&#8217;ve, over the years, tried to encourage development in that direction for virtual worlds.  I did work, through Adobe, to help develop Atmosphere 3D back in the early 2000&#8217;s.  And we did a lot of work to try and understand the marketplace and the specific value-add of doing things in 3D over 2D.</strong></p>
<p><strong>And this is kind of why I keep referring back to VR and VW&#8217;s with respect to augmented reality, is that with immersive worlds, there was this idea&#8230; there was this big rush.  Everybody was so excited about it.  It was obviously the next cool thing.  And everybody wanted to try to do everything in it.  You could do your shopping in virtual worlds. You could have meetings in virtual worlds.</strong></p>
<p><strong>Tish Shute:</strong> and  shopping, yes ..that didn&#8217;t work out so well!</p>
<p><strong>Chris Arkenberg:  And everybody was very excited in developing these things.  And what it really came down to is, &#8220;Yeah, you can, but it&#8217;s actually a lot better to do those things on a flat plane or in person.&#8221;  Meeting Place, WebEx, TelePresence &#8211; those tools generally do a much better job at facilitating TelePresence meetings than a virtual world does. The same with TelePresent Education. There are only very specific things that both VR and AR are really good at.</strong></p>
<p><strong>And that&#8217;s where I find myself with augmented reality right now, trying to really pick through that and critically look at which uses are really appropriate for an AR overlay. And again, I think that&#8217;s why the hype cycle is important, because it reflects back this desire that AR is going to be the next big thing &#8211; the be-all, end-all of interacting with data in the cloud &#8211; and forces us all to take a critical look at why we should do things in AR instead of on a screen.</strong></p>
<p><strong>AR is not going to work well for most things but it&#8217;s going to be very good for certain uses.  Right now I&#8217;m very keen at trying to understand what those things might be.</strong></p>
<p><strong>Tish Shute:</strong> I had this wonderful conversation (more in an upcoming post) with Kevin Slavin one of the founders of <a href="http://areacodeinc.com/" target="_blank">Area/Code</a> at Web 2.0 Expo and I think some of what he describes about the data brokerages of High Frequency trading have some interesting implications for AR&#8217;s role, say, in ubiquitous computing.  The trading markets are now pretty much dominated by machine to machine intelligence; machine to machine brokerages.  They are basically game economies on a scale that we can barely wrap our heads around where the speed that bots and algo traders can access the network is the key.  We really have no clue what is going on  until we lose our house&#8230;</p>
<p>Kevin was also<a href="http://radar.oreilly.com/2010/09/drawing-the-line-between-games.html" target="_blank"> interviewed by James Turner on O&#8217;Reilly Radar.</a> He talked about how much of the interesting work in location based mobile social apps is defined in opposition to the model of Second Life.  He also talked to me about  how we are seeing &#8220;first life&#8221; take on the qualities of &#8220;second life.&#8221;  What goes on the trading floor is largely a performance secondary to a more important world of machine intelligence with giant co-located servers  and bots fighting for trading advantages measured in fractions of seconds.</p>
<p>He pointed out how we draw on all these tropes from sci-fi movies, these HUDs based on ideas of machine intelligence where the robot talks to the other robot in English through an English HUD! Many of our current visual tropes for AR are perhaps just as inadequate for the kind of data driven world we live in.</p>
<p>Of course, when you are thinking of having fun with  dinosaurs, or illustrated books, or whatever, this is not, perhaps, an issue. But if you are thinking of augmented reality interfaces as being important in a battle for the network economy, and platforms for growth, how this new interface helps us live better in a world of data is an important issue.</p>
<p><strong>Chris Arkenberg:  Now, does that indicate that the UI just needs more overhaul and innovation, or more that the visual interface for those experiences shouldn&#8217;t really leave the screen?  It shouldn&#8217;t move on to the view plane?</strong></p>
<p><strong>Tish Shute: </strong> Yes we have a few concept videos that try and explore this ..</p>
<p><strong>Chris Arkenberg:  Well, and I think this will happen at the level of human-computer interface.  I mean that&#8217;s always been its role, in making coherent the sort of machine mind, for lack of a better term, making it coherent to the human mind. So I mean there is a lot of this sort of machine intelligence, the semantic Web 3.0 revolution, where it really is about enabling machines, and agents, and bots to understand the content that we&#8217;re feeding them.</strong></p>
<p><strong>But at the end of the day, they, for now, need to be providing value to us human operators. So there&#8217;s always going to be a role for  human-computer interface and user experience design to make this stuff meaningful.</strong></p>
<p><strong>I mean, if you look at the revolution in visualization &amp; data viz, this is of incredible value because it takes a tremendous amount of data and collates it into a glanceable graphic that you can look at and immediately comprehend massive amounts of data because it&#8217;s delivered in a handy, visual way.</strong></p>
<p><strong>So I see that as a fascinating design challenge, how the user experience of the data world can be translated into meaningful human interaction.</strong></p>
<p><strong>Tish Shute:</strong> Yeah.  And when we see <a href="http://stamen.com/" target="_blank">Stamen Design</a> pursuing a big idea in AR, that&#8217;s when we might start to rock and roll, right?</p>
<p><strong>Chris Arkenberg:  Yeah. In my article, I sort of jokingly suggested that Apple will create the iShades.  But, they&#8217;ve got the track record of being way ahead of the curve and delivering the future in very bold forms.</strong></p>
<p><strong>Tish Shute:</strong> A key part of the battle for the network economy is to bring the complexity of data into the human realm in a way that increases human agency.  Kevin suggests that the giant robot casinos of markets should actually lift off into total abstractions as these machine-driven trades get back into the human realm in ways that are so damaging to our lives &#8211;  a lost house or job!  The notion of a counterveillance society where people have more agency over the important aspects of their lives, health, housing, job (which I discussed with Kevin &#8211; interview upcoming) has gotten pretty tricky!</p>
<p>But I think we will begin to see AR eyewear for specific applications (gaming and industrial) get more common fairly soon &#8211; possibly as smart phone accessories.</p>
<p>And it is clear that AR is going to be, increasingly, a part of our entertainment smorgasbord in coming months. The iPod Touch has a camera (although lower resolution), Nintendo&#8217;s are AR-ready and many aspects of the AR vision of hands-free spatial interfaces will go mainstream through Natal.</p>
<p>But we are yet to see an app/platform emerge for mobile. Social AR games that turn every bar and cafe and ultimately the whole city into a gaming venue &#8211; although I think Ogmento and MUVE aim to lead the way here!  Will an AR company achieve Zynga-level success by using Foursquare, for example?</p>
<p>My feeling is that the lesson of Zynga is pretty important for mobile social AR games.  Could Flash social gaming have taken off without Facebook?</p>
<p><strong>Chris Arkenberg:  And that&#8217;s the real driver.  And again, as you mentioned with Second Life, and this was exactly my own sense, is that they stuck to the closed garden model and didn&#8217;t get the power of social and collaboration.  They attempted to add some of those affordances within the world, but, you know, ultimately most people aren&#8217;t in virtual worlds, and most people aren&#8217;t using augmented reality.  So leveraging the really predominant platforms like Twitter and Facebook and Foursquare, being able to leverage those affordances, that connectivity, into a platform like augmented reality, I think, is really critical. Because again, you get nothing unless you have the masses, unless you have people present.</strong></p>
<p><strong>Tish Shute:</strong> In AR research there is a long history of the notion of powerful AR-dedicated devices, but smart phones and tablets are good enough, and can launch augmented reality into the heart of the internet economy.  I think the elusive AR eyewear will come to us initially as a smart phone accessory for specific apps.  But, for the moment, most AR apps make little attempt to play in the wider internet economy.</p>
<p><strong>Chris Arkenberg:  And I think itâ€™s actually much lower hanging fruit, really, to do gaming, marketing, transmedia.  Because then you donâ€™t really care about the cloud, or maybe you only really care about a little part of it that your gaming property is addressing. Then it becomes much more about entertainment, and much more about persuasion, and sensationalism.  And if youâ€™ve got dancing dinosaurs on your street, great!  Itâ€™s entertaining, itâ€™s cool, itâ€™s new. That stuff is fairly straightforward.</strong></p>
<p><strong>I keep coming back to this idea of, you know, the instrumented city.  What sort of data trails do you get out of a fully instrumented city?  So maybe you get traffic patterns, maybe you get geo-local movements of masses, maybe you get energy usage, that sort of thing, all the, sort of  heat maps you can generate from a city. But then what good does it do to be able to have that on an augmented reality layer versus just looking at it on a mobile device or looking at it on your laptop?</strong></p>
<p><strong>Tish Shute:</strong> Of course the use cases for &#8220;magic lens&#8221; AR are different from the kind of hands free, 360 view with tightly registered media, that a full vision of AR has always promised.  The 360 view is quite a different metaphor from the web and mobile rectangular screens.</p>
<p><strong>Chris Arkenberg:  Yes, yes.</strong></p>
<p><strong>Tish Shute:</strong> Did you see that <a href="http://laughingsquid.com/tweet-it-ipads-vs-iphones-a-parody-of-michael-jacksons-beat-it/" target="_blank">great parody of Michael Jackson&#8217;s</a> &#8220;Beat It&#8221; with the iPads versus the iPhones, right?</p>
<p><strong>Chris Arkenberg:  Oh, really?</strong></p>
<p><strong>Tish Shute:</strong> I tweeted it &#8216;cos I thought it was quite funny and a little close to the bone!<br />
[laughter]</p>
<p>&#8220;ur wanna an ipatch 2 b the new fad?&#8221; #AR gets cameo in Twitter, iPads &amp; iPhone&#8217;s Michael Jackson-Inspired Parody via @mashable</p>
<p>It is hard to get away from the importance of eyewear when discussing AR!</p>
<p><strong>Chris Arkenberg: Yes, so the hardware, to me, is a big stumbling point right now, or itâ€™s a large gating factor, I think, for realizing what an augmented reality vision could really be like.  That it really does need to be heads up.  This holding the phone up in front of you is fun to demonstrate that itâ€™s possible, and itâ€™s valuable in some waysâ€¦</strong></p>
<p><strong>Tish Shute:</strong> And itâ€™s particularly nice in some applications like the planes app, the Acrossair subway app where you hold the phone down and get the arrow, right?</p>
<p><strong>Chris Arkenberg:  Yeah, the way-finding stuff I think is really valuable&#8230;</strong></p>
<p><strong>Tish Shute:</strong> Sixth Sense really caught peopleâ€™s imagination because it managed to deliver the gesture interface with cheap hardware, even if projection has limited uses (no brightly lit spaces or privacy for example!).</p>
<p>The other important and as yet unrealized part of the AR dream is real-time communications.  Many interesting use cases would require this. As you know that is my chief excitement, along with federation, in the Google Wave Servers (which should soon be released as <a href="http://googlewavedev.blogspot.com/2010/09/wave-open-source-next-steps-wave-in-box.html" target="_blank">Wave in a Box</a>) for <a href="http://www.arwave.org/" target="_blank">ARWave</a>.</p>
<p><strong>Chris Arkenberg:  Well my sense of Wave is that it was a ChromeOS protocol that they instantiated, or that they exhibited in the public deployment of Google Wave.  That that was a proof of their sort of low level architectural solution.  Because, you know, theyâ€™ve been rumored to be working on this cloud OS for some time. And so my sense is that Wave is actually one of their core components of that cloud OS, and that it just happened to incarnate for the public in a test run as Google Wave.</strong></p>
<p><strong>Tish Shute:</strong> I do hope that Wave in a Box will lower the barriers to entry to people experimenting with this technology.  The FedOne server was just way too hard for most people to take the time to set up.  Of course, it is the brilliance of the Wave Operational Transform work that also poses problems in terms of ease of use. But Wave Federation Protocol is pretty innovative. And could even play an important role in real-time communications for AR eyewear connected to smartphones. The challenges that Wave takes on re real-time communications, federation, permissions and filters are pretty important ones for AR&#8230;</p>
<p><strong>Chris Arkenberg:  Especially when youâ€™re trying to federate a lot of permissions and filter a lot of data, which all of that gets even more important when you have a visual layer between you and the real world.</strong></p>
<p><strong>Tish Shute:</strong> You got it.  Yeah!</p>
<p><strong>Chris Arkenberg:  I think thatâ€™s really valuable real estate, both for third parties that want to get access to your eyes, as well as for you, as the user, who still needs to navigate through the phenomenal world and not be occluded by massive amounts of overhead data.</strong></p>
<p><strong>Tish Shute:</strong> Yes, I am sure Google has big plans for the next level of cloud computing and Wave looks at some key challenges.  I suppose federation poses some key business problems.  I think it was Michael Jones who said to me that it was a bit like socialism in that you have to be willing to give something up for the greater good.</p>
<p>Perhaps federation does not present enough appeal because of its challenges re business models?</p>
<p><strong>Chris Arkenberg:  Well, I wonder.  I mean thereâ€™s got to be some value for their ad platform as ads are moving more towards this personalized experience.  Advertising is becoming less of a shotgun blast and more of a very precise, surgical strike. So being able to track user data to such a fine degree to mobilize the appropriate ads around them wherever they are, on any platform, is certainly very valuable to Google and their ad ecology.</strong></p>
<p><strong>Tish Shute:</strong> Many people have high hopes that HTML 5 by lowering the barrier of entry forÂ  browser style AR could also pave the way for some interesting AR work..</p>
<p><strong>Chris Arkenberg:  Well, as much as I would hope that all the different players are going to come together and establish some shared set of standards, really, what&#8217;s happening is it&#8217;s a rush to the finish line to be the first&#8230;to get the most penetration in the marketplace so that Layar, for example, can say, &#8220;It&#8217;s official.  We&#8217;re the platform.&#8221;  And then the consolidation that will follow, where the Googles and the other big players like Qualcomm say, &#8220;OK, it&#8217;s mature enough.  We&#8217;ll start buying up all the smaller companies.&#8221;</strong></p>
<p><strong>And thatâ€™s where the real challenge is right now is that there are no standards.  Itâ€™s such an immature technology that you have a lot of different players trying to establish the ground rules.  And again, this is one of the challenges that faced public virtual worlds, is that you had a lot of different virtual worlds that werenâ€™t talking to each other in any particular way, and that they each had their own development platform. And so you end up with a very fractured ecosystem or set of competing ecosystems, which is kind of whatâ€™s happening with AR right now, where a developer has to choose between a number of different new platforms or hedge by deploying across multiple platforms. Basically, the web browser wars are set to be recapitulated by the AR browsers.</strong></p>
<p><strong>Among them, Layar and Metaio seem to be getting the most traction.  But thereâ€™s still not a really strong case for a unified development ecosystem to emerge.</strong></p>
<p><strong>Tish Shute:</strong> So a discussion of ecosystem development brings us back to the Points of Control Map I think. So what do you see as key points of interest for AR developers to watch in the  Points of Control Map? And where do you want to sort of put your bets, right?  We are still really waiting for mobile social AR to emerge into the mainstream.</p>
<p><strong>Chris Arkenberg:  Yes.  And thatâ€™s primarily the shortcoming of  the hardware itself, but also of the accuracy of current GPS technology.  Thatâ€™s another kind of gating factor, because again, AR wants to be able to express the data within a distinct place or object.</strong></p>
<p><strong>So in a lot of ways, other than kind of what we&#8217;ve allowed for the broader entertainment purposes, for AR to really work, there needs to be more resolution in GPS location.  So for it to be truly locative&#8230;because it&#8217;s OK to tell Foursquare that you&#8217;re in Bar X.  But if you want to be able to draw data directly on a wall within that bar, or do advertising over the marquee on the front, you need more factors to accurately register those images on a discrete location. So that&#8217;s another, sort of, aspect of the immaturity of AR, is that it&#8217;s still very hard to register things on discrete locations without employing a number of diverse triangulation methods.</strong></p>
<p><strong>Tish Shute:</strong> Right.  The mobile AR games we see at the moment are really just faking a relationship to the physical world unless they rely on markers or some limited form of natural feature recognition which is really just a more sophisticated form of markers.  But the Qualcomm  SDK does offer some opportunities to tie AR media to the world more tightly as does the Metaio SDK. But in terms of a mobile social AR game that could be like the Cape of Zynga to FourSquare in Location Basin [see the <a href="http://map.web2summit.com/">Points of Control map</a>]&#8230; We havenâ€™t seen anything close yet.</p>
<p>AR should be able to bring the check-in mode to any object in our environment.</p>
<p><strong>Chris Arkenberg:  Yes, yes.  And thatâ€™s actually one of the early interests I had in the notion of social augmented reality. I wanted a way to tag my community with invisible annotations that only certain people could read, and found pretty quickly that thatâ€™s very difficult to do.  I mean you can kind of do some regional tagging, like on a  beach, for example, but if you wanted to tag the bench that was on the cliff above the beach, itâ€™s very difficult to do that using strictly locative reckoning.</strong></p>
<p><strong>There&#8217;s all sorts of really cool social engagement that can be revealed when people are allowed to attach things to the world around them, to the streets they normally pass through, or the points of interest that they normally engage in. To be able to author on the fly on the streets and attach it discretely to an object effectively.</strong></p>
<p><strong>Tish Shute:</strong> And yes we do have all kinds of markers and QR codes.  But Erick Schonfeld of Tech Crunch<a href="http://techcrunch.com/2010/10/18/likify-qr-code/" target="_blank"> made a good point that QR codes</a>: &#8220;Until QR code scanners become a default feature of most smartphones and  they start to become actually useful enough for people to go through the  trouble to scan them, they will remain a gee-whiz feature nobody uses.&#8221;</p>
<p><strong>Chris Arkenberg:  So again, this gets back to competing standards and who gets access to the phone stack, the bundle. Who gets the OEM dealâ€¦?</strong></p>
<p><strong>Tish Shute:</strong> Yes, the battles for the networks on the Handset Plains are pretty important for AR!<br />
[laughter] I think Layar have made some smart moves on The Handset Plains.</p>
<p>And there are a lot of acquisitions of nearfield technology to look at.  If I remember rightly eBay bought the Red Laser tech from Occipital &#8211; now there&#8217;s an interesting company. Their panorama stuff rocks!</p>
<p><strong>Chris Arkenberg:  Right. Thereâ€™s a lot of nearfield stuff thatâ€™s supposed to hit all of the major mobile platforms in the next year or so.</strong></p>
<p><strong>I mean I think where this is heading, in my mind, is basically smart motes.  You know, little nearfield wide-range RFIDâ€™s that are the size of a small, tiny square that you could attach to just about anything and then program it to be a representative of your establishment or of an object, that then you can start to tag just about anything. I mean you canâ€™t rely on geo to do it, but if you have a Nearfield chip there that costs maybe like two cents to buy in bulk, and you can flash program it, then you can start to attach data to just about anything.</strong></p>
<p><strong>Tish Shute:</strong> Yes &#8216;cos some things still remain very difficult for near field image recognition technologies like Google Goggles.</p>
<p><strong>Chris Arkenberg:  Well, if your phone can interrogate for Nearfield devices, and it detects a chip in its near field, it can then interrogate that chip.  The chip may contain flash data on itself, or it may contain the local server in the establishment, or it may go to the cloud and get that data back.</strong></p>
<p><strong>Tish Shute:</strong> Yes there is movement from the top, and open source hardware like Arduino has created an opportunity for all sorts of creativity with instrumented environments.  And the handheld sensors in our pockets &#8211; our smart phones &#8211; create a lot of opportunity for bottom up innovation too.</p>
<p><strong>Chris Arkenberg:  I mean thatâ€™s my guess.  If you look at what IBM is doing with their Smarter Planet initiative, theyâ€™re partnering with a lot of municipalities, and obviously with a lot of businesses and their global supply chains.</strong></p>
<p><strong>But theyâ€™re basically working with municipalities and all these stakeholders to instrument their territory, their business, or their city, as it were. So theyâ€™re working to provide embedded sensors and the software necessary to read them out and run reports &amp; viz.  And presumably that software can extend to include some sort of mobile device to interrogate the sensors and read the data.</strong></p>
<p><strong>Thatâ€™s kind of a top-down approach of a very large global company working with top-down governance bodies to do this. Simultaneously you have the maker crowd experimenting with Arduino and such to build from the grassroots, the bottom up approach.</strong></p>
<p><strong>And that&#8217;s primarily gated by the amount of learning it takes to be able to program these devices, to be able to hack them.  Typically, the grassroots creators who make these devices don&#8217;t have the luxury of very large budgets to make things highly usable and WYSIWYG.</strong></p>
<p><strong>So the bottom up community is a sandbox to create tremendous amounts of innovation, because they are unconstrained by the very real financial needs of the top down innovators.  And so you get a lot of fascinating innovation, a very rich ecology from the bottom-up approach, but you donâ€™t get a lot of wide distribution.  But that does filter up to and inform the top down approach that has a lot more money to put into this stuff.  And it ultimately has to respond to the needs of the marketplace.</strong></p>
<p><strong>I mean if thereâ€™s an answer to the question of whether something like AR will succeed through the bottom-up grassroots approach or the top-down industry approach, I would say it would be both.  That handsets will be hacked to read the bottom up innovations of the maker community, and handsets will be preprogrammed to read the top down efforts of the IBMs of the world.</strong></p>
<p><strong>Tish Shute:</strong> Yes but I have to say it is very time-consuming hacking phones (I have just seen a few days sucked up in this myself so that I could upgrade my G1 to try out the new ARWave client!).  I mean Android has obviously been the platform of choice because of openness but the business model of iPhone and its market share in the US sure make it important for developers.  It&#8217;s like you don&#8217;t exist if you don&#8217;t have an iPhone app for what you are doing.</p>
<p><strong>Chris Arkenberg:  Yeah, and thatâ€™s the challenge, because at the end of the day developers prefer not to work for free and a solid, reliable mechanism to monetize their efforts becomes very appealing.</strong></p>
<p><strong>When I look at this map, the points of control map, itâ€™s really interesting to me, because what it says to me with respect to AR is each of these little regions that they have drawn out would be a great research project. So every single one of these should be instructive to AR.</strong></p>
<p><strong>In other words, we should be able to look at social networks, the land of search, or kingdom of ecommerce, and apply some very rigorous critical thinking to say, â€œHow would AR add to this engagement, this experience of gaming, or ecommerce, or content?â€</strong></p>
<p><strong>Looking at each of these individually and really meticulously saying, â€œOK, well yes, it can do this but how is that different from the current screen media experience, the current web experience that we have of all these types of things?â€  You know, how can augmented reality really add a new layer of value and experience to these? And I think that process would really trim a lot of the fat from the hopes and dreams of AR and anchor it down into some very pragmatic avenues for development.  And then you could start looking at, â€œWell, OK, what happens when we start combining these?â€ When we take gaming levels and plug that into the location basin, as you suggested.</strong></p>
<p><strong>Tish Shute: </strong> Some of the important platforms for AR donâ€™t appear to have spots on the map like Google Street View and other mapping technologies that hold out so much hope for AR, or am I missing something?</p>
<p><strong>Chris Arkenberg:  You mean on the map?</strong></p>
<p><strong>Tish Shute:</strong> Yes for the full vision of AR we need sensor integration, computer vision and cool mapping technologies to come together. Do you see where Google Maps and Google Street View&#8230; Where would they be?</p>
<p><strong>Chris Arkenberg:  Yeah, I mean itâ€™s certainly content, itâ€™s locationâ€¦</strong></p>
<p><strong>Are you familiar with Earthmine?</strong></p>
<p><strong>Tish Shute:</strong> Yes, yes I am, definitely.<a href="http://www.earthmine.com/index" target="_blank"> Earth Mine</a>, <a href="http://simplegeo.com/" target="_blank">Simple Geo</a>, Google Street View, user generated internet photo sets like  Flickr all of these could be very important to AR, potentially.</p>
<p><strong>Chris Arkenberg:  Well, and the interesting thing about Earthmine is that theyâ€™re effectively trying to do an extremely precise pixel to pixel location mapping.  So theyâ€™re taking pictures of cities just like Street View, except theyâ€™re using the Z axis to interrogate depth and then using very precise geolocation to attach a GPS signature to each pixel that theyâ€™re registering in their images. Effectively, you get a one-to-one data set between pixels and locations.  And so you can look at something like Google Street View, and if you point to the side of a building, in theory, it should know exactly where that is.</strong></p>
<p><strong>Theyâ€™re rolling this out with the idea of being able to tag augmented reality objects in layers directly to surfaces in the real world.  So thatâ€™s another approach to trying to get accurate registration and to try and create what are essentially mirror worlds. Then your Google Street View becomes a canvas for authoring the blended world, because if you plop a 3D object into Street View on your desktop, and then you go out to that location with your AR headset, youâ€™ll see that 3D object on the actual street.</strong></p>
<p><strong>Tish Shute:</strong> There was some experimental work with Google Earth as a platform for a kind of simulated AR but I suppose Google Earth doesnâ€™t figure in the battle for the network economy as it never got developed as a platform.</p>
<p><strong>Chris Arkenberg:  It hasnâ€™t tried to become a platform, to my  knowledge.  I mean I know some people are doing stuff with it, but as far as I know, Google owns it, they did it the best because they have the best maps, and thereâ€™s not a huge ecosystem of development thatâ€™s based around it other than content layers.</strong></p>
<p><strong>And my sense of everything else on the Points of Control map is theyâ€™re looking more at these sort of platform technologies thatâ€¦</strong></p>
<p><strong>Tish Shute:</strong> Yes, re platforms for growth for AR. Gaming consoles will probably emerge as a significant platform for AR this year.</p>
<p><strong>Chris Arkenberg:  There will be much more of a blended reality experience in the living room for sure, and with interactive billboards. Digital mirrors are another area.  So I mean if we kind of extend AR to include just blended reality in general, you know, this is moving into our culture through a number of different points. As you mentioned, it will be in the living room, it will be in our department stores where you can preview different outfits in their mirror. Weâ€™re already seeing these giant interactive digital billboards in Times Square and other areas.</strong></p>
<p><strong>Itâ€™s funny.  I mean for me, the sort of blended reality aside, the augmented reality, to me, is actually a very simple proposition in some respects.  When I look at this map, augmented reality is just an interface layer to this map in my mind, just as itâ€™s an interface layer to the cloud and itâ€™s an interface layer to the instrumented world. Itâ€™s a way to get information out of our devices and onto the world.</strong></p>
<p><strong>Tish Shute:</strong> The importance of leveraging existing platforms has become pretty clear but it is interesting Facebook definitely gave Zynga the opportunity but would Facebook be so big without Zynga&#8217;s social gaming boost?</p>
<p><strong>Chris Arkenberg:  I feel that Zynga has definitely helped its growthâ€¦But I think Zynga has benefited a lot more from Facebook than Facebook has from Zynga.</strong></p>
<p><strong>Tish Shute:</strong> Zynga certainly proved you  could build a profitable business on Facebookâ€™s API!</p>
<p><strong>Chris Arkenberg:  They did.  And they also really validated the Facebook ecosystem and the platform.  They really extended itâ€¦ Zynga benefited from the massive social affordances that Facebook had already architected and developed. They brought gaming directly into Facebook, and particularly, this emerging brand of lightweight social gaming that when you sit it on top of a massive global social network like Facebook, it suddenly lights up.</strong></p>
<p><strong>Tish Shute: </strong>AR pioneers should quite carefully go through this map. There is so much to think about here. Iâ€™m a kind of fanatic about  Streams of  Activity in AR.  Real time brokerages and their potential for AR is something I am fascinated by.  That is one reason I love the ARWave project.</p>
<p>Anselm Hook, to me, is one of the great thinkers in this area of real time brokerages &#8211; with his project Angel, and the work of <a href="http://www.ushahidi.com/" target="_blank">Ushahidi,</a> which is now the platform <a href="http://www.ugotrade.com/2010/09/17/urban-augmented-realities-and-social-augmentations-that-matter-interview-with-bruce-sterling-part-2/" target="_blank">for augmented foraging (see here)</a>.  Anselm is now working on AR at PARC which is exciting.</p>
<p><strong>Chris Arkenberg:  Well, there are some challenges working with data streams. Presentation and filtering I think is a big challenge with any sort of stream.  Because obviously, you have a lot of potential data to manage, to parse, and to make valuable and comprehensible. So I think this is bound very closely to being able to personalize experiences, or having very discrete valuable experiences.  Disaster relief, for example, I think is an interesting idea that ties into the Pachube type of work. Where, if you had the headset and you were a relief worker, and you had immediate lightweight, non-intrusive, heads up alpha channel overlay, waypoint markers showing you all of the disaster locations or points of need, AR becomes extremely valuable, because it&#8217;s a primarily hands-free environment.  This is why the military stuff is so interesting.</strong></p>
<p><strong>Tish Shute:</strong> Ha!  We are running into the eye patch/shades/goggles/sexy specs thing again.  But filtering and making streams of activity relevant will be very interesting for AR.  Again that is why I love the Wave Federation Protocol work because of what they have built into their XMPP extensions.  You can have your real-time personal data streams, or community streams, or broadcast publicly &#8211; the permissions are built in.</p>
<p>And Thomas Wrobelâ€™s original vision of these layers and channels is only fully expressed if you have the eyewear.</p>
<p><strong>Chris Arkenberg:  Well, and it becomes redundant if itâ€™s on a mobile. To use a very basic example, Twitter, obviously thereâ€™s an app you can view those streams of activity on the camera stream. But you can view that real time data on the screen.  Why do you need to see it heads up?</strong></p>
<p><strong>The reason I really pay attention to what the military is investing in, one, because they have a ton of money, but also because they tend to represent the core bio survival needs of the speciesâ€¦So, when I look at computing, I see this very obvious trend of computers getting smaller and smaller and closer and closer to us because theyâ€™re so valuable to our success.  They give us so much valuable information for engaging our world on a moment by moment basis.  So, of course now we have these tiny little handheld devices that give us access to the global knowledge depositories of human history, because itâ€™s so useful to have that stuff right at hand.</strong></p>
<p><strong>The only impediment now is that it takes one of our hands, if not both of them, to access it.  So if you are in the natural world, which we are all always in the natural world, ultimately, you want your hands free in order to engage with the world on a physical level.</strong></p>
<p><strong>I see computation, or rather, our access to computation is just going to get thinner and thinner, and weâ€™ll very soon move into eyewear, and inevitably, weâ€™ll move into brain computer interface in some capacity.</strong></p>
<p><strong>So when youâ€™re the disaster worker, or a deployed soldier, or the extreme mountain biker, or the heli-skier, or just an adventurer, there are a lot of very practical reasons to have access to information on a heads-up plane. I see AR as being so profound and so valuable, but weâ€™re getting a glimpse of it in its infancy, and itâ€™s got a ways to go to be able to really contain what it is weâ€™re reaching for.</strong></p>
<p><strong>Tish Shute:</strong> I agree.</p>
<p><strong>Chris Arkenberg:  And thatâ€™s been a big criticism Iâ€™ve had with all the existing AR implementations that Iâ€™ve seen, is that the UI really needs a revolution.  Itâ€™s very heavy handed.  It is not dynamic, even though itâ€™s supposed to be.  It does not take advantage of transparencies.  It treats the screen like a screen.  It doesnâ€™t treat the screen like a window onto the real world. When youâ€™re looking on the real world, you donâ€™t want a lot of occlusion.  You want very soft-touch indicators of a data shadow behind something that you can then address and then have it call out the information thatâ€™s important to you.</strong></p>
<p><strong>Tish Shute:</strong>  Now, that&#8217;s a very nice kind of image you&#8217;ve conjured for me there.  Do you see that more could be done on the smartphone than is being done within that?  Or are we like waiting for the old iShades?</p>
<p><strong>Chris Arkenberg:  I think thereâ€™s definitely a lot of room for improvement on the smartphone UI.  Nobodyâ€™s really played around with it much. And again, I think thatâ€™s in part that there hasnâ€™t been a really established platform with enough money to fund interesting UI work. We see it in some of the concept demos that float around every now and then.</strong></p>
<p><strong>I guess itâ€™s both a blessing and curse that Iâ€™m always five steps ahead of where Iâ€™m trying to get to.</strong></p>
<p><strong>Tish Shute:</strong> Yeah, I am familiar with that feeling!</p>
<p><strong>Chris Arkenberg:  So Iâ€™m always trying to reach for the vision even though itâ€™s a bit distant. I think thereâ€™s going to be a lot of development on the handsets.  But again, I think we need a lot of refinement.  We need a lot of real critical analysis of why this is a good thing.</strong></p>
<p><strong>To get back to the original point of Raimoâ€™s comment, it struck me.  And I knew it, but I just had set it aside as gimmickry. But heâ€™s right.  Content is a huge driver for this.  Just stuff thatâ€™s engaging, and fun, and cool, and shows off the technology so they can get enough money to make it through whatever Trough of Disappointment may be waiting.</strong></p>
<p><strong>Tish Shute:</strong> Yeah, donâ€™t underestimate the Planes of Content!Â  They are a great place to get interest and money to keep AR technology  moving on, right?</p>
<p><strong>Chris Arkenberg:  Yeah, yeah.  Because, you know, thereâ€™s a lot of freedom there.  And you can piggyback on all the rest of the content thatâ€™s out there and jump on memes and marketing objectives, etc&#8230;</strong></p>
<p><strong>And there&#8217;s a lot of stuff&#8230;I&#8217;m blanking on some of the names, but some of these historical recreations of city streets.  There&#8217;s a street in London where they overlaid historical photos in a really compelling experience. [Museum of London - http://www.museumoflondon.org.uk/] Again, I&#8217;m completely forgetting the attributions, but those are the type of things that can really be pursued on the existing platforms.  There is stuff that&#8217;s really compelling and really cool.</strong></p>
<p><strong>I heard of another interesting use case &#8211; and I should say that I can’t find attributions to this anywhere on the web and I may be paraphrasing or mis-representing the actual work, but I think the concept is worth exploring anyway. But the idea was that you could take the locations of border checkpoints and conflict sites in Palestine and Israel and visually overlay them on an AR layer in San Francisco.  And it would do some sort of transposition where you could virtually view these things in San Francisco with the same locational mapping superimposed. So you could see where the checkpoints were.  You could see where the wall was.  You could see where suicide bombings were and where there had been conflicts.</strong> <strong>[I cannot find any citations for this!]</strong></p>
<p><strong>Tish Shute: </strong> But with an AR view?  But why would you use an AR view if you  are in San Francisco, then?</p>
<p><strong>Chris Arkenberg:  Because it superimposes two realities, translating the Gaza conflict into San Francisco as you are walking around. You can interrogate the world. Thereâ€™s a discoverability aspect where youâ€™re using the headset to reveal things, or the handset rather, to reveal things that you could not see otherwise in your city. It was done as an art piece, but as a provocative, obviously political art piece.</strong></p>
<p><strong>Tish Shute: </strong>Very interesting.  Iâ€™d love to see that. Because thatâ€™s interesting to get away from this idea that you actually have to sort of have this one to one relationship between the data and the world is kinda nice, isnâ€™t it?  Well, not one to one, but a very literalâ€¦getting away from that literalness is kind of good.</p>
<p><strong>Chris Arkenberg:  And thatâ€™s a possibility of virtual reality and augmented reality merging, that maybe virtual reality is actually going to do best by coming out of the box and writing itself over our reality, so that as you are walking around, you are no longer seeing San Francisco, but you are seeing part of Everquest or World of Warcraft.</strong></p>
<p><strong>Tish Shute: </strong> Well this is where Bruce Sterling gets to that point he made in <a href="http://augmentedrealityevent.com/2010/06/06/are-2010-keynote-by-bruce-sterling-build-a-big-pie/" target="_blank">his keynote for are2010</a>, that if we actually have viable AR eyewear, then you get the gothic stepsister of AR, VR rising from the grave!Â  He asks whether the very charm of augmented reality, is in fact that it adds rather than subtracts from your engagement with the world and that getting get sucked back into the black hole of VR might not be so great.</p>
<p><strong>Chris Arkenberg:  And then you get all sorts of interesting challenges to social cohesion if you have a lot of different people experiencing very different worlds, effectively.  That if there is no real consensual reality and a majority of your local populace is, in fact, experiencing very different and unique versions of the world, what does that do to social cohesion?  How does that reinforce tribalism, for example, when only you and certain others get to opt in to a particular layer view of the world?</strong></p>
<p><strong>Tish Shute:</strong> Yes Jamais Cascio wrote an interesting piece on that issue on AR and social cohesion a while back.</p>
<p>An eye patch is a more logical vision than the goggles in many ways but I suppose the loss is stereo vision?</p>
<p><strong>Chris Arkenberg:  And actually, there were developments in military helicopter technology many years ago that used a single pane square of glass over the eye mounted to the helmets of pilots.  And then they drew various bits of heads-up information on it. So that ensures that youâ€™re having a real strong engagement with the real world, which, obviously, when youâ€™re a helicopter pilot is quite important.  But you still have access to the data layer of  the invisible world.</strong></p>
<p><strong>Tish Shute:</strong> I just went to <a href="http://www.cloudera.com/company/press-center/hadoop-world-nyc/" target="_blank">Hadoop World</a> and I have to say, I was awestruck about how big thatâ€™s got.  I mean <a href="http://hadoop.apache.org/" target="_blank">Hadoop</a> has gone from like zero to huge in just a few years.  I mean itâ€™s just like now everyone has the power of the Google big table at their fingertips.</p>
<p>Whatâ€™s the play for AR in the land of search?</p>
<p>I could imagine Hadoop being very powerful tool for AR analytics?</p>
<p>Have you got any thoughts on the land of search and AR? Of course visual search is proceeding at a fast pace and there is a lot of promise for integrations with AR in the future but the latency for visual search is still pretty high?</p>
<p><strong>Chris Arkenberg:  In the near term, not a lot.  In the medium term, thereâ€™s a larger trend towards virtual agents that you can program or teach to keep watch over things for you as an effort to scale down the data overload.  So search is something thatâ€™s going to become more personalized and more active.  Thereâ€™s a movement to make it so people can essentially deputize these agents to be always searching for them; to be out there looking for the things that they have told these agents are important to them.</strong></p>
<p><strong>So active search for AR I think presents some challenges, obviously because you need to do text input, typically, or voice input.  Voice input, I think, is much more achievable than text input for AR.  But I can certainly imagine an AR layer that is being serviced by these agents that we have roaming around the web for us reconciling their visual view of the world with our personalizations. AR apps are contextually aware so it knows that if youâ€™re downtown, itâ€™s not going to be giving you a ton of information about Software as a Service infrastructure, or what have you.  But that, instead, itâ€™s going to be handing you little tidbits about a particular clothing brand youâ€™ve opted in to follow and information about  music venues &amp; schedules, for example.  Or perhaps youâ€™ll be on the lookout for other users that have opted in to publicly tag themselves as a member of this or that affinity.</strong></p>
<p><strong>I keep coming back to this idea of AR as really just a simple visualization layer that all of these other technologies can potentially feed into.  So in that sense, search becomes a passive thing that AR is just simply presenting to you in a heads-up, hands-free, or potentially hands-free environment.</strong></p>
<p><strong>Tish Shute:</strong> Yes, the big challenge is the stepping stones to that point! Small steps that keep interest going into developing the underlying technology (and not just in research labs!) that will bring us that interface.Â  We have seen some movement already with Qualcomm.</p>
<p><strong>Chris Arkenberg:</strong> And there are bandwidth issues as well, as we can see with the Google Goggles, which is a great idea of visual search.  But you have to take a picture and send it to the cloud and wait for your results.  Itâ€™s not a real-time dynamic interrogation of the world.</p>
<p><strong>Tish Shute:</strong> Yes we are really only at the very beginning of  AR being ready for prime time.. it would be interesting to ask AR developers how many of them use AR on a daily basis.</p>
<p><strong>Chris Arkenberg:  I think a lot of us, weâ€™re just informed by the sci-fi myths and fascinated with the potential now thatâ€™s itâ€™s starting to become real. But I think we all kinda get that itâ€™s still extraordinarily young.  I mean the web is extraordinarily young. And AR is itself far younger in a lot of ways in its implementations.</strong></p>
<p><strong>Everybody has a lot of excitement about all of the great potentials that are being unleashed by this great wave of the Internet and the web and ubiquitous mobile computing.  So thatâ€™s why, you know, you look at that map and we talk about AR and you canâ€™t talk about any of the stuff without talking about all of it, in a lot of ways, particularly with something like AR where itâ€™s so ultimately agnostic and could be completely pervasive across all of these layers.</strong></p>
<p><strong>So my fascination is with the future, and I measure our progress towards it by the young nascent offerings from the platform players and the developers. And yeah, a lot of it is…it’s akin to getting that first triangle on the screen in 3D.  You know, when the renderer finally works and you get a triangle on the screen, and you go, “Oh my God, it renders.”  And then you can start to really build polygons and build objects, and start doing boolean operations, and get light and rendering in there, and textures, and on, and on, and on.<br />
So Iâ€™m fascinated by the Layars and the Metaioâ€™sâ€¦<br />
[laughter]</strong></p>
<p><strong>Tish Shute:</strong> Yes and hats off to all the players in the emerging industry, Layar, Metaio, Ogmento, Total Immersion, and all the others who are finding clever ways to bring fun aspects of AR into the mainstream, and fuel interest to take the technology to the next level.</p>
<p><strong>Chris Arkenberg:  Absolutely.  And the hype cycle is very valuable.  It has really helped launch the AR industry.  Itâ€™s brought a lot of eyes, and itâ€™s brought a lot of money into the industry.  And itâ€™s forcing people like us to have these conversations to understand how to refine its growth and really focus on the potential in all these different venues, whether itâ€™s trying to save lives, or better understand your city, or have really compelling entertainment experiences.</strong></p>
<p><strong>Everybodyâ€™s excited, and everybodyâ€™s sharing, and everybodyâ€™s trying to move it forward in a way thatâ€™s the most productive.</strong></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2010/10/27/platforms-for-growth-and-points-of-control-for-augmented-reality-talking-with-chris-arkenberg/feed/</wfw:commentRss>
		<slash:comments>3</slash:comments>
		</item>
		<item>
		<title>The Next Wave of AR: Mobile Social Interaction Right Here, Right Now!</title>
		<link>http://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/</link>
		<comments>http://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/#comments</comments>
		<pubDate>Fri, 20 Nov 2009 04:53:07 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Artificial general Intelligence]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[online privacy]]></category>
		<category><![CDATA[open source]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[privacy and online identity]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[AR browsers]]></category>
		<category><![CDATA[AR Dev camp]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[calo]]></category>
		<category><![CDATA[mobile social]]></category>
		<category><![CDATA[mobile social interaction utility]]></category>
		<category><![CDATA[open distributed augmented reality]]></category>
		<category><![CDATA[pygowave]]></category>
		<category><![CDATA[real time internet]]></category>
		<category><![CDATA[siri]]></category>
		<category><![CDATA[smart things]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[social augmented reality]]></category>
		<category><![CDATA[The Copenhagen Wheel]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[the outernet]]></category>
		<category><![CDATA[the sentient city]]></category>
		<category><![CDATA[Wave Federation Protocol]]></category>
		<category><![CDATA[Web Squared]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4869</guid>
		<description><![CDATA[The Next Wave of AR: Mobile Social Interaction, Right Here, Right Now! View more presentations from Tish Shute. Click on the image below or here to watch this presentation and others from Momo13]]></description>
				<content:encoded><![CDATA[<div id="__ss_2542526" style="width: 425px; text-align: left;"><a style="font:14px Helvetica,Arial,Sans-serif;display:block;margin:12px 0 3px 0;text-decoration:underline;" title="The Next Wave of AR: Mobile Social Interaction, Right Here, Right Now!" href="http://www.slideshare.net/TishShute/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now-2542526">The Next Wave of AR: Mobile Social Interaction, Right Here, Right Now!</a><object style="margin:0px" classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="425" height="355" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0"><param name="allowFullScreen" value="true" /><param name="allowScriptAccess" value="always" /><param name="src" value="http://static.slidesharecdn.com/swf/ssplayer2.swf?doc=thenextwaveofar2-091120000046-phpapp01&amp;stripped_title=the-next-wave-of-ar-mobile-social-interaction-right-here-right-now-2542526" /><param name="allowfullscreen" value="true" /><embed style="margin:0px" type="application/x-shockwave-flash" width="425" height="355" src="http://static.slidesharecdn.com/swf/ssplayer2.swf?doc=thenextwaveofar2-091120000046-phpapp01&amp;stripped_title=the-next-wave-of-ar-mobile-social-interaction-right-here-right-now-2542526" allowscriptaccess="always" allowfullscreen="true"></embed></object>
<div style="font-size: 11px; font-family: tahoma,arial; height: 26px; padding-top: 2px;">View more <a style="text-decoration:underline;" href="http://www.slideshare.net/">presentations</a> from <a style="text-decoration:underline;" href="http://www.slideshare.net/TishShute">Tish Shute</a>.</div>
</div>
<p>Click on the image below or <a href="http://www.mobilemonday.nl/talks/tish-shute-the-next-wave-of-ar/" target="_blank">here to watch</a> this presentation and others from <a href="http://www.mobilemonday.nl/">Momo13</a></p>
<p><a href="http://www.mobilemonday.nl/talks/tish-shute-the-next-wave-of-ar/" target="_blank"><img class="alignnone size-medium wp-image-4876" title="Screen shot 2009-11-20 at 1.32.24 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-20-at-1.32.24-PM-300x167.png" alt="Screen shot 2009-11-20 at 1.32.24 PM" width="300" height="167" /></a></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/11/19/the-next-wave-of-ar-mobile-social-interaction-right-here-right-now/feed/</wfw:commentRss>
		<slash:comments>4</slash:comments>
		</item>
		<item>
		<title>Toward the Sentient City: The Future of the Outernet and How to Imagine it?</title>
		<link>http://www.ugotrade.com/2009/11/09/toward-the-sentient-city-the-future-of-the-outernet-and-how-to-imagine-it/</link>
		<comments>http://www.ugotrade.com/2009/11/09/toward-the-sentient-city-the-future-of-the-outernet-and-how-to-imagine-it/#comments</comments>
		<pubDate>Mon, 09 Nov 2009 21:09:00 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[3rd cloud]]></category>
		<category><![CDATA[Adam Greenfield]]></category>
		<category><![CDATA[aesthetics of distributed participation]]></category>
		<category><![CDATA[Amphibious Architecture]]></category>
		<category><![CDATA[architectures of participation]]></category>
		<category><![CDATA[asynchronous city]]></category>
		<category><![CDATA[Benjamin H. Bratton]]></category>
		<category><![CDATA[Breakout!]]></category>
		<category><![CDATA[Conflux 2009]]></category>
		<category><![CDATA[Dan Hill]]></category>
		<category><![CDATA[Dharma Dailey]]></category>
		<category><![CDATA[distributed open AR]]></category>
		<category><![CDATA[Enrique Ramirez]]></category>
		<category><![CDATA[everyware]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[human electric hybrid]]></category>
		<category><![CDATA[hybrid social netoworks]]></category>
		<category><![CDATA[julian Bleeker]]></category>
		<category><![CDATA[Laura Forlano]]></category>
		<category><![CDATA[location aware applications]]></category>
		<category><![CDATA[Mark Shepard]]></category>
		<category><![CDATA[Martijn de Waal]]></category>
		<category><![CDATA[Matthew Fuller]]></category>
		<category><![CDATA[Mimi Zeiger]]></category>
		<category><![CDATA[Natalie Jeremijenko]]></category>
		<category><![CDATA[Natural Fuse]]></category>
		<category><![CDATA[new architectures of participation]]></category>
		<category><![CDATA[Nicolas Nova]]></category>
		<category><![CDATA[Omar Khan]]></category>
		<category><![CDATA[Open AR]]></category>
		<category><![CDATA[outernet]]></category>
		<category><![CDATA[Philip Beesley]]></category>
		<category><![CDATA[real time communication]]></category>
		<category><![CDATA[real time web]]></category>
		<category><![CDATA[real-time database enable city]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[Sentient City Survival Kit]]></category>
		<category><![CDATA[Situated Technologies]]></category>
		<category><![CDATA[smart things]]></category>
		<category><![CDATA[social mobility]]></category>
		<category><![CDATA[social mobility and the 3rd cloud]]></category>
		<category><![CDATA[synchronous internet of things]]></category>
		<category><![CDATA[The Copenhagen Wheel]]></category>
		<category><![CDATA[The Living Architecture Lab]]></category>
		<category><![CDATA[the social negotiation of Technology]]></category>
		<category><![CDATA[Too Smart City]]></category>
		<category><![CDATA[Toward the Sentient City]]></category>
		<category><![CDATA[Trash Track]]></category>
		<category><![CDATA[urban sustainability]]></category>
		<category><![CDATA[urbanware]]></category>
		<category><![CDATA[Usman Haque]]></category>
		<category><![CDATA[Web Squared]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4758</guid>
		<description><![CDATA[Amphibious Architecture &#8211; &#8220;submerges ubiquitous computing into the water—that 90% of the Earth’s inhabitable volume that envelops New York City but remains under-explored and under-engaged.&#8221; Toward the Sentient City, brought &#8220;architects and urban designers into a conversation that until now has been limited largely to technologists,” and created an extraordinary opportunity to investigate distributed architectures [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.sentientcity.net/exhibit/?p=603" target="_blank"><span id="n.6p" title="Click to view full content"> </span></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-12.03.40-AM.png"><img class="alignnone size-medium wp-image-4783" title="Screen shot 2009-11-06 at 12.03.40 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-12.03.40-AM-300x200.png" alt="Screen shot 2009-11-06 at 12.03.40 AM" width="300" height="200" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/dhj5mk2g_404g3prc6dc_b.jpg"><img class="alignnone size-medium wp-image-4759" title="dhj5mk2g_404g3prc6dc_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/dhj5mk2g_404g3prc6dc_b-300x199.jpg" alt="dhj5mk2g_404g3prc6dc_b" width="300" height="199" /></a><br />
<span id="ot:x" title="Click to view full content"> </span></p>
<p><em><a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank"><span id="it_d" title="Click to view full content">Amphibious </span>Architecture</a> &#8211; &#8220;submerges ubiquitous computing into the waterâ€”that 90% of the Earthâ€™s inhabitable volume that envelops New York City but remains under-explored and under-engaged.&#8221;</em></p>
<p><a href="http://www.sentientcity.net/exhibit/">Toward the Sentient City</a>,<span id="ju31" title="Click to view full content"> brought </span> &#8220;architects and urban designers into a conversation that until now has been limited largely to technologists,â€ and <span id="hb:z" title="Click to view full content">created an extraordinary opportunity to investigate distributed architectures of participation of what we might call the &#8220;outernet.&#8221;Â  This is a</span><span id="hb:z" title="Click to view full content"> timely conversation as &#8220;web squared,&#8221;Â  &#8220;smart things,&#8221; the &#8220;internet of things,&#8221; or the &#8220;outernet,&#8221;</span><span id="g6ad" title="Click to view full content"> and their popular &#8220;ambassador&#8221; augmented reality are rapidly becoming everyone&#8217;s &#8220;business.&#8221;</span><span id="eb9y" title="Click to view full content"> From </span><span id="b265" title="Click to view full content">&#8220;evil&#8221; marketers, to global corporations, </span><span id="sq48" title="Click to view full content">environmentalists, artists and community activists -Â  everyone, it seems, is</span><span id="mqn_" title="Click to view full content"> interested in the possibilities of this new frontier.</span></p>
<p><span id="ot:x" title="Click to view full content">It is a challenging task to respond to, </span><a href="http://www.sentientcity.net/exhibit/">Toward the Sentient City</a><span id="ot:x" title="Click to view full content">, an exhibition whose backdrop includes a series of conversations on Situated Technologies &#8211; published by the Architectural League, from a circle of people who have been thinking, writing, and speaking on networked urbanism for many years now, including: Adam Greenfield, </span><span id="vjks" title="Click to view full content"> Mark Shepard, Matthew Fuller, Usman Haque, Benjamin H. Bratton, Natalie JeremiJenko, Laura Forlano, Dharma Dailey,Â  Philip Beesley, Omar Khan, Julian Bleeker, Nicolas Nova</span><span id="o7yp" title="Click to view full content">.Â  And the exhibition itself has a very thoughtful group of respondents, see posts from: <a href="http://www.sentientcity.net/exhibit/?p=595" target="_blank">Dan Hill</a>, <a href="http://www.sentientcity.net/exhibit/?p=659" target="_blank">Martijn de Waal,</a> <a href="http://www.sentientcity.net/exhibit/?p=622" target="_blank">Enrique Ramirez</a>, and <a href="http://www.sentientcity.net/exhibit/?p=603" target="_blank">Mimi Zeiger.</a></span><a href="http://www.sentientcity.net/exhibit/?p=603" target="_blank"><span id="n.6p" title="Click to view full content"> </span></a></p>
<p>But one ofÂ  Toward the Sentient City&#8217;s key accomplishments was to go beyond the rhetorical, and to put practical examples out into the world to<span id="ijgh" title="Click to view full content"> organize a discussion on some of the ideas and possibilities of ubiquitous computing that have barely begun to emerge from academic research, and entrepreneurial blue skying.Â  As curator, </span><a href="http://www.andinc.org/v3/" target="_blank">Mark Shepard</a><span id="ijgh" title="Click to view full content">, explained:<br />
</span></p>
<p><strong><span id="fqkh" title="Click to view full content">&#8220;The </span></strong><strong><span id="tq6_" title="Click to view full content"><span>aim is to provide concrete examples in the present around which to organize a discussion about just what kind of future we might want. Whether theyâ€™re prototypes or not, these commissions are concrete examples. Theyâ€™re not abstract ideas. And we can go stand next to each other and look at and interact with something which is out there in the world behaving in the way it behaves, performing as it does, and we can then begin to have a discussion about it that is less dependent upon powers of rhetoric.</span> So itâ€™s not about me persuading you about an idea but itâ€™s about us evaluating something thatâ€™s living and existing in this world. And that was really the intention of the show.â€</span></strong></p>
<p><span id="ijgh" title="Click to view full content">The commissioned works </span><span id="d4-:" title="Click to view full content">-<a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank"> Amphibious Arc</a></span><span id="d4-:" title="Click to view full content"><a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank">hitecture</a>, <a href="http://www.sentientcity.net/exhibit/?p=53" target="_blank">Breakout!</a>, <a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a>, <a href="http://www.sentientcity.net/exhibit/?p=59" target="_blank">Too Smart City</a>, and <a href="http://www.sentientcity.net/exhibit/?p=31" target="_blank">TrashTrack,</a> </span><span id="xnxp" title="Click to view full content">that were the hub of Toward the Sentient City&#8217;s </span><span id="g.08" title="Click to view full content"> events, themes and texts, provided a unique glimpse</span><span id="j-jh" title="Click to view full content"> at </span><span id="pa9i" title="Click to view full content">some of the possible dystopian and utopian futures of a &#8220;smart&#8221; city.Â  But, most importantly,Â  all the works questioned what might be new </span><span id="ijgh" title="Click to view full content">architectures of participation for a sentient city. </span></p>
<h3>New Architectures of Participation: Hybrid Social Networks with Human and Non-human Participants .</h3>
<p>Of the five works, Amphibious Architecture and Natural Fuse were particularly fascinating to me because they explored the possibilities of sensor networks to create new forms of distributed participation in networked ecosystems that connected the experience/trajectories of human and non human actors &#8211; fish, plants,Â  and people.</p>
<p>Both Amphibious Architecture, andÂ  &#8220;Natural Fuse&#8221; &#8211; from Usman Haque and <a href="http://www.haque.co.uk/" target="_blank">Haque Design + Research,</a> gave exhibition attendees the chance to experience at a personal level our relationships with our non-human neighbors.</p>
<p><a href="http://www.sentientcity.net/exhibit/?p=5" target="_blank"><span id="it_d" title="Click to view full content">Amphibious </span>Architecture</a> from the The Living Architecture Lab at Columbia University Graduate School of Architecture, Planning and Preservation (Directors David Benjamin and Soo-in Yang) and Natalie Jeremijenko, Environmental Health Clinic at New York University, <span id="w.m9" title="Click to view full content">used a sensor array to &#8220;pierce the reflective </span><span id="ud4u" title="Click to view full content">surface of the water&#8221; that</span> separates us from the underwater ecosystem below.Â  <span id="kfwr" title="Click to view full content">The sensor arrays just below the surface of the East River andÂ  floating light array</span> (see picture on left opening this post) create a new interface between people and fish whose movements and water quality are transmitted in light.</p>
<p>One could also SMS the fish and the single beaver that lives in the rivers surrounding NYC to find the conditions they were experiencing.<span id="cehj" title="Click to view full content"> But t</span><span id="y9m6" title="Click to view full content">urning the city&#8217;s &#8220;back stories,&#8221; like the movements of &#8220;Yo beaver,&#8221; and the oxygen levels and water quality of the rivers into &#8220;fore stories,&#8221; is only one of the many ways Natalie JeremiJenko explores how we can engender the empathy necessary for humans and non humans to live in harmony and mutual benefit.</span></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/nataliefishandmicrochips.jpg"><img class="alignnone size-medium wp-image-4802" title="nataliefishandmicrochips" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/nataliefishandmicrochips-300x199.jpg" alt="nataliefishandmicrochips" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/fishfoodpost.jpg"><img class="alignnone size-medium wp-image-4803" title="fishfoodpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/fishfoodpost-300x199.jpg" alt="fishfoodpost" width="300" height="199" /></a></p>
<p><span id="y9m6" title="Click to view full content"> </span>Toward the Sentient City also held workshops/presentations in conjunction with <a href="http://confluxfestival.org/2009/" target="_blank">Conflux 2009</a>. After her Conflux presentation, Natalie Jeremijenko of Amphibious Architecture (which is also a collaborative project between <a href="http://www.environmentalhealthclinic.net/">xClinic</a>, <a href="http://www.thelivingnewyork.com/">The Living</a><span id="wz9v" title="Click to view full content">, </span>&#8220;and other intelligent creatures on the East River&#8221;)Â  invited participants to enjoy a lunch of cross-species foods at the East River site.Â  <span id="k2u." title="Click to view full content"> </span></p>
<p><span id="k2u." title="Click to view full content">The cross-species lunch takes </span><span id="x0h." title="Click to view full content"> an existing interaction pattern through which people and fish are already communicating, </span><span id="tkk5" title="Click to view full content">i.e., people going to the river â€“ the waterfront,Â  and feeding the fish</span><span id="vct4" title="Click to view full content"> Wonder Bread (which is bad for humans and fish); and transforms this desire to feed the fish into something which actually can remove the mercury content from the fish and our bodies by removing it from the food chain, so a previously inharmonious connection between people and fish, is redirected into a productive interaction benefitting both species.Â  As it turns out, food that is good for Fish (see pictures above), and removes mercury from their bodies can also be nutritious and tasty for humans. </span></p>
<p><a href="http://www.sentientcity.net/exhibit/?p=43" target="_blank">Natural Fuse</a>, from team members, Usman Haque, creative director, Nitipak &#8216;Dot&#8217; Samsen, designer, Ai Hasegawa, designer, Cesar Harada, designer, Barbara Jasinowicz, producer, used sensors to<span id="oenx" title="Click to view full content"> link humans and plants in a network where we are accountable for how our behavior affects others in our ecosystem. </span></p>
<p><span id="oenx" title="Click to view full content">If you brought an ordinary plant to the exhibition, you could take home an electronically assisted plant and become part of a social network of humans and plants. This network of humans and electronically assisted plants is also a carbon sink and ifÂ  more energy is consumed than the total number of plants in the social network can offset, plants begin to die giving immediate feedback and consequences to being greedy about energy consumption. </span><span id="ijgh" title="Click to view full content">For more about joining the Natural Fuse network see<a href="http://www.naturalfuse.org" target="_blank"> here.</a><br />
</span></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/naturalfusepres.jpg"><img title="naturalfusepres" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/naturalfusepres-300x199.jpg" alt="naturalfusepres" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/naturalfusetakehome.jpg"><img title="naturalfusetakehome" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/naturalfusetakehome-300x199.jpg" alt="naturalfusetakehome" width="300" height="199" /></a></p>
<p><span id="pa9i" title="Click to view full content"> </span><span id="w-ed" title="Click to view full content">We are in the pre-dawn ofÂ  sensor networks like those Natural Fuse and Amphibious Architecture created &#8211; social</span><span id="n.6p" title="Click to view full content"> networksÂ  that link human and non human participants in entirely new ways are largely an uncharted territory. </span><span id="o7yp" title="Click to view full content">(Note: T</span><span id="zr9t" title="Click to view full content">he upcoming <a href="http://www.situatedtechnologies.net/" target="_blank">Situated Technologies</a> Pamphlet 6</span><span id="ijgh" title="Click to view full content"> &#8211; <strong>&#8220;Micro Public Places,&#8221; </strong>Marc Bohlen and Hans Frei, indicates it will continue the journey with an investigation ofÂ  &#8220;transparent and distributed participation.&#8221;)</span></p>
<h3>Where Does the Social Negotiation of Technology Happen?</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/markshepardpost.jpg"><img class="alignnone size-medium wp-image-4825" title="markshepardpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/markshepardpost-199x300.jpg" alt="markshepardpost" width="199" height="300" /></a></p>
<p>Frequent questions that came up at the presentations given by the teams that produced the commissioned works were: Does this idea scale?Â Â  Does it close the loop in that you <span>get answers to the questions asked?Â  How does the conversation gain agency?Â  And where does the social negotiation of technology happen?Â  (These last two questions were asked by <a href="http://www.orangecone.com/" target="_blank">Mike Kuniavsky</a> at Mark Shepardâ€™s presentation at Conflux: â€œ</span><a id="ktb-" title="Sentient City Survival Kit" href="http://survival.sentientcity.net/" target="_blank"><span>Sentient City Survival Kit</span></a><span>.â€ â€“ see picture above)Â  I think it is fair to say that these questions for the most part remain unanswered. But Toward the Sentient city was alive with ideas and practical examples about ways we can explore these questions more deeply.</span></p>
<p><span id="oenx" title="Click to view full content">Usman Haque in response to the question, &#8220;Does this experiment scale?,&#8221; replied:</span></p>
<p><strong>&#8220;it would, but at an individual level because it has to remain at the individual level because it is about the individual in relationship to the wider social context as opposed to building a forest to offset a city it is about each individual making choices of their own about what they do andÂ  having some kind of knowledge about the effect they are having on other people because most of the time we are quite complacent &#8211; we are able to do whatever we want because we are not necessarily aware how our intrusions effect both human and non-human neighbors&#8230;.&#8217;</strong></p>
<p>So how does this close the loop?Â  Usman explains that one of the key aspects for him is that if you do take home a plant you become part of a system in which you are no longer anonymous and if a plant is threatened (plants get three lives) you have the opportunity to email the person in the system who has threatened your plant.Â  Usman noted that one of the interesting things that happened in the context of the exhibition, where there was a single unit, was that 90% of the time people switched it on to selfish mode &#8211; presumably because they were anonymous.Â  Another aspect of Natural Fuse that raises interesting questions is that as more people decide to join the network the risk of a plant being harmed by any particular individual&#8217;s selfishness lessens.Â  As <a href="http://www.sentientcity.net/exhibit/?p=659" target="_blank">Martijn de Waal</a>,<span id="gi2_" title="Click to view full content"> in his response that unpacks some of the deeper philosophical, epistemological, and ethical questions that Natural Fuse addresses, observes:</span></p>
<p><strong>&#8220;The concept of a commons thus assumes cooperation and mutual accommodation. Could Sentient Technology play a role in the allocation of limited resources between citizens? Could it lead to the emergence of some sort of peer-to-peer governance model, that could prevent overusage of scarce resources?&#8221;</strong></p>
<h3><strong><br />
New Aesthetics of Distributed Participation</strong></h3>
<p><span id="nqx:" title="Click to view full content">The works of, </span><span id="nqx:" title="Click to view full content"><span> &#8220;Toward the Sentient City&#8221; point to possibilities for a new aesthetics of distributed participation in which users and system are no longer separated but instead â€œdevelop joint forms of observing and knowing that neither [...] is capable on its own.â€ (quote from upcoming, <a href="http://www.situatedtechnologies.net/" target="_blank">Situated Technologies Pamphlets</a></span> 6: Micro Public Places, Marc Bohlen and Hans Frei).Â  Natural Fuse and Amphibious Architecture examine the new transactional realities of the Sentient City.</span></p>
<p><span id="po-s" title="Click to view full content"> But there are many questions left unanswered. We know a lot about the power of generativity from the </span>internet (see Zittrain) &#8211; the ur<strong> &#8220;architecture of participation.&#8221;</strong> <span id="hri-" title="Click to view full content">As Zittrain points out, the &#8220;generativity&#8221; of the internet is &#8220;the engine that has catapulted the internet from backwater to ubiquity.&#8221; </span> Tim O&#8217;Reilly coined the phrase, &#8220;architecture of participation,&#8221; to &#8220;describe the nature of systems that are designed for user contribution,&#8221;<span id="o7et" title="Click to view full content"> such that &#8220;participants extend the reach/increase the value of the system.&#8221; But as Tim O&#8217;Reilly put it in his recent talk, &#8220;<a href="http://www.slideshare.net/timoreilly/state-of-the-internet-operating-system" target="_blank">State of the Internet Operating System:&#8221;</a></span></p>
<p><span title="Click to view full content"><strong>&#8220;Web 2.0 is about finding meaning in user-generated data, and turning that meaning into real-time user facing services.Â  &#8220;Web Squared&#8221; takes that same concept to real-time sensor data.&#8221;</strong><br />
</span></p>
<p><span id="o7et" title="Click to view full content">We know little yet about what constitutes generativity for the &#8220;outernet,&#8221; particularly for the kind ofÂ  hybrid social networks that Natural Fuse and Amphibious Architecture present.Â  Social Networks that connect people and place, humans and non humans, challenge dichotomies of man and nature, and machine and user in new and unexpected ways.</span></p>
<p>At the moment, the internet is going through a metamorphosis with the emergence of real time technologies like XMPP, PubHubSubBub and Google Wave and the coming of age of mobile computing.Â Â  While these shifts were not investigated specifically in any of the commissioned works I think all the worksÂ  begged the question,Â  What is a common platform for social interaction in the &#8220;outernet,&#8221; or sentient city?Â  I was not entirely satisfied, from this point of view, with a web interface for Natural Fuse or SMS as a mobile interface for Amphibious Architecture.</p>
<p><a href="http://www.media.mit.edu/people/dpreed" target="_blank">David P. Reed</a> points to the relationship between social mobility what he describes as the 3rd cloudÂ  and the need for a common platform (see <a href="http://www.slideshare.net/venicesessions/david-reed-social-mobility-and-the-3rd-cloud" target="_blank">David Reed &#8211; Social Mobility and the 3rd Cloud</a>. Hat tip to <a href="http://twitter.com/srenan" target="_blank">@srenan</a> for pointing me to David&#8217;s presentation).</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-11.11.25-PM.png"><img class="alignnone size-medium wp-image-4826" title="Screen shot 2009-11-06 at 11.11.25 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-11.11.25-PM-300x222.png" alt="Screen shot 2009-11-06 at 11.11.25 PM" width="300" height="222" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-11.16.59-PM1.png"><img class="alignnone size-medium wp-image-4828" title="Screen shot 2009-11-06 at 11.16.59 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-06-at-11.16.59-PM1-300x222.png" alt="Screen shot 2009-11-06 at 11.16.59 PM" width="300" height="222" /></a></p>
<p><em>Slides above are from David P. Reed&#8217;s presentation,Â <a href="http://www.slideshare.net/venicesessions/david-reed-social-mobility-and-the-3rd-cloud" target="_blank"> Social Mobility and the 3rd Cloud</a></em><a href="http://www.slideshare.net/venicesessions/david-reed-social-mobility-and-the-3rd-cloud" target="_blank"></a></p>
<p>What is an architecture of participation for mobile, social interaction? This is something I am very interested in.</p>
<p>Recently I began a project with a small group of augmented reality developers and enthusiasts to use Google Wave Federation Protocol as a transport system for open distributed, social augmented experiences (lots more to come on this soon &#8211; you can see the back story in my posts <a href="http://www.ugotrade.com/2009/10/13/ar-wave-layers-and-channels-of-social-augmented-experiences/" target="_blank">here</a> and <a href="http://www.ugotrade.com/2009/09/26/total-immersion-and-the-transfigured-city-shared-augmented-realities-the-web-squared-era-and-google-wave/" target="_blank">here</a>).Â  Wave has introduced an open federated architecture of participation that <strong style="font-weight: normal;">combines asynchronous &amp; synchronous data,Â  bringingÂ  together the advantages of real-time communication with the persistent hosting of collaborative data (like wikis). </strong><strong> </strong></p>
<p>Augmented Reality puts who you are, where you are, and what you are doing center stage, and is an interface for &#8220;communications embedded in context&#8221; and &#8220;enabled by identity&#8221; &#8211; two key qualities of what David <span>P. Reed calls the 3rd cloud.Â  An open, distributed framework for augmented reality could createÂ  an interconnected sense of AR, one that fuses augmentation, data overlays, and varied media with location/time/place and crucially, social networking.Â  Such an interface would open up many possibilities for the new transactional realities that could </span>integrate real-time cloud based data with a human perspective and social networking.Â  I am using the term,<span> transactional realitiesÂ  to suggest an extension into social augmented experiences ofÂ  what, Di-Ann Eisnor, </span><a id="s050" title="Platial" href="http://www.platial.com/"><span>Platial</span></a><span>, describes as,Â  &#8220;</span><span><span><span>transactional cartography&#8221; &#8211; &#8220;the movement from map providing entertainment/information to map as enabling action&#8221; (see </span><a id="h6.r" title="Human as Sensors" href="http://www.youtube.com/watch?v=Di285pgcZRE&amp;feature=PlayList&amp;p=F664D8C553A57C93&amp;index=3"><span>Human as Sensors</span></a><span>).</span></span></span></p>
<p>We have only just got a glimpse of how real time technologies and &#8220;communications embedded in context&#8221; will transform social interaction and our cities. This post on <a id="r3ow" title="Writing as Real-Time Performance" href="http://snarkmarket.com/2009/3605">Writing as Real-Time Performance</a> that looks at the Google Wave playback feature is a brilliant example of how real time technology turns familiar practices like writing inside out, and catapults us into new time trajectories. And, if you haven&#8217;t already seen Matt Jones of BERG&#8217;s, brilliant look at, <a href="http://berglondon.com/blog/2009/10/26/all-the-time-in-the-world-talk-at-design-by-fire-2009-utrecht/" target="_blank">&#8220;All the time in the world&#8221; </a>- from the &#8220;soft time&#8221; and &#8220;squishy time&#8221; of cell phone culture, to their antecedents in real-time computing, go now! Also see Dan Hill&#8217;s work on <a href="http://cityofsound.com" target="_blank">&#8220;time based notation,&#8221;</a> and Tom Carden&#8217;s work for mysociety.org</p>
<p><span> </span></p>
<h3>Transactional Realities Between the &#8220;Asynchronous City&#8221; and the &#8220;Synchronous Internet of Things&#8221;</h3>
<p><span> </span><span id="nqbb" title="Click to view full content"><span>Out of Toward the Sentient City&#8217;s five commissioned works,</span><span> only</span></span><span id="n:_n" title="Click to view full content"><span> </span></span><span> </span><a href="http://www.sentientcity.net/exhibit/?p=31" target="_blank"><span>Trash Track</span></a><span> </span><span id="nqbb" title="Click to view full content"></span><span> </span><span id="n:_n" title="Click to view full content"><span>focused on the &#8220;synchronized Internet of Things.&#8221; </span></span><a href="http://www.sentientcity.net/exhibit/?p=31" target="_blank"><span id="n:_n" title="Click to view full content"><span> </span></span></a><span id="n:_n" title="Click to view full content"><span>Trash Track asks what can we learn from the aggregated data streams of &#8220;smart&#8221; trash about</span></span><span> the infamous path of trash from cities of privilege to rivers of want, rather than</span><span id="rkuc" title="Click to view full content"><span> exploring the particular transactional realities of a social network that linked people with their trash.</span></span><span id="n.6p" title="Click to view full content"> </span></p>
<p><span id="ft58" title="Click to view full content"><br />
<span> </span></span><span id="ft58" title="Click to view full content"> </span><span id="n.6p" title="Click to view full content"><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/TrashTrack2.jpg"><img title="TrashTrack2" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/TrashTrack2-300x199.jpg" alt="TrashTrack2" width="300" height="199" /></a></span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/TrashTrack2.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrack4.jpg"><img title="trashtrack4" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrack4-300x199.jpg" alt="trashtrack4" width="300" height="199" /></a><span id="ft58" title="Click to view full content"><span> </span></span></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrack3.jpg"><img class="alignnone size-medium wp-image-4768" title="trashtrack3" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrack3-300x199.jpg" alt="trashtrack3" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrackpost.jpg"><img class="alignnone size-medium wp-image-4782" title="trashtrackpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/trashtrackpost-300x199.jpg" alt="trashtrackpost" width="300" height="199" /></a></p>
<p><span id="ft58" title="Click to view full content"><span>The goals of </span></span><span id="ft58" title="Click to view full content"><span>Trash Track </span></span><span id="ft58" title="Click to view full content"><span>were</span></span><span id="ft58" title="Click to view full content"><span>, Assaf </span></span><span id="ft58" title="Click to view full content"><span>Biderman explained during his presentation:</span></span></p>
<p><span id="ft58" title="Click to view full content"><span> <strong>â€œto learn about the removal chain, to see if knowing more cou</strong></span></span><strong><span id="f:mt" title="Click to view full content"><span>ld promote behavioral change, and investigate if smart tagging could one day lead to 100% recycling.â€ </span></span></strong></p>
<p><strong><span id="f:mt" title="Click to view full content"> </span></strong><span>The team from SENSEable City Laboratory, MIT included &#8211; Carlo Ratti: Director, Assaf Biderman: Associate Director, Rex Britter: Advisor, Stephen Miles: Advisor, Kristian Kloeckl Project Leader, Musstanser Tinauli, E Roon Kang, Alan Anderson, Avid Boustani, Natalia Duque Ciceri, Lorenzo Davolli, Samantha Earl, Lewis Girod, Sarabjit Kaur, Armin Linke, Eugenio Morello, Sarah Neilson, Giovanni de Niederhausern, Jill Passano, Renato Rinaldi, Francisca Rojas, Louis Sirota, Malima Wolf.</span></p>
<p><span>However, Assaf,Â  in his presentation, presented another project from SENSEable City Laboratory in partnership with the City of Copenhagen, </span><a href="http://senseable.mit.edu/copenhagenwheel/" target="_blank">The Copenhagen Wheel</a>.Â  <span>This project seems to work brilliantly at the intersection of the &#8220;asynchronous city&#8221; (Bleeker and Nova) and the &#8220;synchronized internet of things&#8221;Â  The &#8220;smart&#8221; wheel &#8211; a low cost, open source, human electric hybrid is:</span></p>
<p><strong>&#8220;an electric bicycle wheel that can be easily retrofitted into any regular bicycle and location and environmental sensors which are powered by the bike wheel and in turn provide data for a variety of applications.&#8221;</strong></p>
<p>This project, that aims to promote urban sustainability through smart biking, opens up many possibilities for a bottom up architecture of participation for the sentient city (<a href="http://senseable.mit.edu/copenhagenwheel/">see video here</a>). <strong><br />
</strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-08-at-7.18.45-PM.png"><img class="alignnone size-medium wp-image-4838" title="Screen shot 2009-11-08 at 7.18.45 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/11/Screen-shot-2009-11-08-at-7.18.45-PM-300x218.png" alt="Screen shot 2009-11-08 at 7.18.45 PM" width="300" height="218" /></a><br />
</strong></p>
<p><a href="http://www.andinc.org/v3/" target="_blank">Mark Shepard</a> describes something he calls &#8220;propagative urbanism:&#8221;</p>
<p><strong>&#8220;a way of thinking about shaping the experience of urban space in terms of a bottom-up, participatory approach to the evolution of cities.&#8221; </strong></p>
<p>And, in the most recent pamphlet in the <a href="http://www.situatedtechnologies.net/" target="_blank">Situated Technologies pamphlets </a><span><a href="http://www.situatedtechnologies.net/" target="_blank">series, #5 â€œAsynchonicity Design Fictions for Asynchronous Urban Computing,â€ </a>Julian Bleeker and Nicolas Nova invert an emphasis in the so-called â€œreal-time database enabled cityâ€ with its synchronized Internet of Thingsâ€¦.Â  and speculate on the existence of an â€œasynchronous city.â€Â  They &#8220;forecast situated technologies based on weak signals that show the importance of time on human perspectives.â€Â  They ask:</span></p>
<p><span><strong>&#8220;why, besides &#8216;operational efficiency,&#8217; would we want a ubiquitously computed environment?Â  What are the measures of &#8216;better&#8217; that we want to count as meaningful?&#8221;</strong></span></p>
<p><span>They explain:</span></p>
<p><span><strong>..we are trying to think through what &#8220;urbanwares might be &#8211; urban operating systems &#8211; if they were less about synchronization, top-down construction and connected channels of information and databases and so forth, and more about asynchronized, decentralized things.Â  Software, data, time out of alignment, incongruities, tiles and imbrications of the geographic, spatial parameters into a delicious kind of lively peasant&#8217;s stew.&#8221; </strong><br />
</span></p>
<p><span>One takeaway, perhaps, from Toward the Sentient City is that it&#8217;s at the intersection of the &#8220;asynchronous city&#8221; and the &#8220;real-time database enabled city&#8221; where many new transactional realities of the sentient city will arise.</span></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/11/09/toward-the-sentient-city-the-future-of-the-outernet-and-how-to-imagine-it/feed/</wfw:commentRss>
		<slash:comments>2</slash:comments>
		</item>
		<item>
		<title>ISMAR 2009: An Augmented Reality &#8220;Top Chef&#8221; Coopetition</title>
		<link>http://www.ugotrade.com/2009/10/24/ismar-2009-an-augmented-reality-top-chef-coopetition/</link>
		<comments>http://www.ugotrade.com/2009/10/24/ismar-2009-an-augmented-reality-top-chef-coopetition/#comments</comments>
		<pubDate>Sat, 24 Oct 2009 22:26:42 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Android]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[Acrossair]]></category>
		<category><![CDATA[AR Sketch]]></category>
		<category><![CDATA[AR Wave]]></category>
		<category><![CDATA[arduino]]></category>
		<category><![CDATA[ARhrrr]]></category>
		<category><![CDATA[augmented reality at VW]]></category>
		<category><![CDATA[avatars and people together in physical spaces]]></category>
		<category><![CDATA[Avilus]]></category>
		<category><![CDATA[Blair Macintyre]]></category>
		<category><![CDATA[Chetan Damani]]></category>
		<category><![CDATA[Christine Perey]]></category>
		<category><![CDATA[cloud computing]]></category>
		<category><![CDATA[Dirk Groten]]></category>
		<category><![CDATA[distributed computing]]></category>
		<category><![CDATA[eyewear for augmented reality]]></category>
		<category><![CDATA[geoAR]]></category>
		<category><![CDATA[Georg Klein]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[Green Tech AR Competition]]></category>
		<category><![CDATA[HMDs]]></category>
		<category><![CDATA[Humans as Sensors]]></category>
		<category><![CDATA[industrial augmented reality]]></category>
		<category><![CDATA[Institut Graphische Datenverarbeitung]]></category>
		<category><![CDATA[ISMAR 2009]]></category>
		<category><![CDATA[ISMAR 2010]]></category>
		<category><![CDATA[ISMAR09]]></category>
		<category><![CDATA[Jay Wright]]></category>
		<category><![CDATA[Joe Ludwig]]></category>
		<category><![CDATA[Junaio]]></category>
		<category><![CDATA[Layar]]></category>
		<category><![CDATA[Mark Billinghurst]]></category>
		<category><![CDATA[Markus Tripp]]></category>
		<category><![CDATA[Metaio]]></category>
		<category><![CDATA[Michael Goesele]]></category>
		<category><![CDATA[Microsoft and augmented reality]]></category>
		<category><![CDATA[Mobile Monday]]></category>
		<category><![CDATA[Mobilizy]]></category>
		<category><![CDATA[MoMo]]></category>
		<category><![CDATA[Noah Zerking]]></category>
		<category><![CDATA[Noora Guldemond]]></category>
		<category><![CDATA[Ogmento]]></category>
		<category><![CDATA[open distributed AR]]></category>
		<category><![CDATA[open hardware]]></category>
		<category><![CDATA[Ori Inbar]]></category>
		<category><![CDATA[participatory sensing]]></category>
		<category><![CDATA[Pattie Maes]]></category>
		<category><![CDATA[Peter Meier]]></category>
		<category><![CDATA[Platial]]></category>
		<category><![CDATA[PTAM on an iphone]]></category>
		<category><![CDATA[Put a Spell. Thomas Carpenter]]></category>
		<category><![CDATA[RoomWare]]></category>
		<category><![CDATA[Sean White]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[smart phones]]></category>
		<category><![CDATA[social augmented experiences]]></category>
		<category><![CDATA[social augmented realities]]></category>
		<category><![CDATA[standards for augmented reality]]></category>
		<category><![CDATA[Steven Feiner]]></category>
		<category><![CDATA[Technische Universitat Munchen]]></category>
		<category><![CDATA[The RoomWare Project]]></category>
		<category><![CDATA[The Zerkin Glove]]></category>
		<category><![CDATA[tracking and mapping in mobile augmented reality]]></category>
		<category><![CDATA[transactional cartography]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[Vernor Vinge]]></category>
		<category><![CDATA[virtual pets]]></category>
		<category><![CDATA[Volkswagen augmented reality group]]></category>
		<category><![CDATA[Vuzix]]></category>
		<category><![CDATA[Wave]]></category>
		<category><![CDATA[Wave enabled augmented reality]]></category>
		<category><![CDATA[Web 2.0 Summit]]></category>
		<category><![CDATA[Yuri van Geest]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4670</guid>
		<description><![CDATA[ISMAR 2009 -Â  was an extraordinary mix ofÂ  high geek, academic eminence, gungho Dutch Cowboy entrepreneurial spirit, German engineering and industry, brilliant artistry, and invention, all fueled by a sense, and a very active presence in the case of Diamond Sponsor &#8211; Qualcomm, that the big technology players are waking up to augmented reality. In [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/MetaioLayarpost.jpg"><img class="alignnone size-medium wp-image-4674" title="Metaio&amp;Layarpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/MetaioLayarpost-300x199.jpg" alt="Metaio&amp;Layarpost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/DirkseesDirkonJunaiopost.jpg"><img class="alignnone size-medium wp-image-4676" title="DirkseesDirkonJunaiopost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/DirkseesDirkonJunaiopost-300x199.jpg" alt="DirkseesDirkonJunaiopost" width="300" height="199" /></a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dirkwatchesdirkvcupost.jpg"><img class="alignnone size-medium wp-image-4675" title="dirkwatchesdirkvcupost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/dirkwatchesdirkvcupost-300x199.jpg" alt="dirkwatchesdirkvcupost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/metaiodinasaurpost.jpg"><img class="alignnone size-medium wp-image-4678" title="metaiodinasaurpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/metaiodinasaurpost-299x201.jpg" alt="metaiodinasaurpost" width="299" height="201" /></a></p>
<p><a href="http://www.ismar09.org/" target="_blank">ISMAR 2009</a> -Â  was an extraordinary mix ofÂ  high geek, academic eminence, gungho Dutch Cowboy entrepreneurial spirit, German engineering and industry, brilliant artistry, and invention, all fueled by a sense, and a very active presence in the case of Diamond Sponsor &#8211; Qualcomm, that the big technology players are waking up to augmented reality.</p>
<p>In the picture sequence above (click on photos to enlarge),Â  <a href="http://twitter.com/metaioUS" target="_blank">Noora </a><span><span><a href="http://twitter.com/metaioUS" target="_blank">Guldemond</a></span></span><span><span>, <a href="http://www.metaio.com/" target="_blank">Metaio</a>, demonstrates <a href="http://www.junaio.com/" target="_blank">Junaio</a> (coming to an iphone near you Nov 2nd) to <a href="http://twitter.com/dirkgroten" target="_blank">Dirk Groten</a>, CTO of<a href="http://layar.com/" target="_blank"> Layar</a> (top left photo).Â  One of the nice social features of Junaio is that users can share the 3D augmented scenes they have created.Â  Noora is demoing this capability to </span></span><span><span>Dirk, and as you can see he cracks up when he sees theÂ  scene Noora has stored on her phone.Â  Dirk and I both recognize that this cute little dinosaur augmentation (close up above on bottom left) must have been created by <a href="http://www.metaio.com/company/" target="_blank">Peter Meier, CTO of Metaio</a>, during the Interoperability and Standards workshop earlier that day.Â  Metaio it seems were discussing standards while enjoying some 3D augmented back chat.<br />
</span></span></p>
<p><span><span> Both Dirk and I were active participants in the workshop too.Â  But little did we know that Peter Meier had introduced his little 3D dinosaur into our discussion while we diligently, and sometimes heatedly, debated the merits of XMPP, Wave Federation Protocol,Â  KML, ARML, VRML, X3D, andÂ  more!Â  The photo I took is on the bottom right of the four pics above. It was probably taken very shortly after Peter&#8217;s augmented Junaio scene.Â  Of course there is no little dinosaur in my pic ofÂ  Dirk Groten with <a href="http://twitter.com/JoeLudwig" target="_blank">Joe Ludwig</a> and <a href="http://twitter.com/markustripp" target="_blank">Markus Tripp of Mobilizy</a> who were discussing AR standards oblivious to Peter&#8217;s virtual pet in our midst.<br />
</span></span></p>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/MarkusTrippPeterMeier.jpg"><img class="alignnone size-medium wp-image-4685" title="MarkusTrippPeterMeier" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/MarkusTrippPeterMeier-300x199.jpg" alt="MarkusTrippPeterMeier" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Thereisawillingnesstostandardizepost.jpg"><img class="alignnone size-medium wp-image-4686" title="Thereisawillingnesstostandardizepost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Thereisawillingnesstostandardizepost-300x199.jpg" alt="Thereisawillingnesstostandardizepost" width="300" height="199" /></a><br />
</span></span></p>
<p><span><span>I must say I had noticed an impish look on Peter Meier&#8217;s face (see photo above on the left &#8211; Peter is wearing glasses and holding a phone).Â  And Markus Tripp, of MobilizyÂ  revealed a little bit of gaming of his own, when he let out that, in part, ARML is a provocation.Â  But Peter was clearly unfazed and enjoying himself.Â  Dirk, tasked to summarize our discussion, stalwartly maintained an optimistic but serious tone fitting for a standards discussion:Â  &#8220;There is a willingness to standardize&#8230;.,&#8221; he began (pic above on left &#8211; click to enlarge and read text). </span></span></p>
<p><span><span> But it was a little 3D dinosaur that, perhaps appropriately, had the last laugh. Fitting, as I am not sure whether anything anyone says about AR standards at the moment will hold up.Â  But, as Ori commented in <a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">his great post &#8211; an epilogue for ISMAR 2009,</a> the vibe was &#8220;Peace and Love&#8221; in AR Browser land (</span></span>although Chetan Damani of <a href="http://gamesalfresco.com/?s=%22acrossair%22" target="_blank">Across Air</a> was not in the standards discussion because he attended the UX/content? workshop instead)<span><span>.Â  But as they say, &#8220;all&#8217;s fair in love and war.&#8221;Â  And it is my feeling the games have barely begun!Â  There are many players (<a href="http://www.youtube.com/watch?v=KI4lB00Ht9o&amp;feature=player_embedded#" target="_blank">virtual pets </a>included) waiting in the wings. I met some at ISMAR, and they are just itching to join the frey.<br />
</span></span></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/coopetitionpost.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/ARConsortiumpost2.jpg"><img class="alignnone size-medium wp-image-4701" title="ARConsortiumpost2" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/ARConsortiumpost2-300x188.jpg" alt="ARConsortiumpost2" width="300" height="188" /></a><img class="alignnone size-medium wp-image-4690" title="coopetitionpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/coopetitionpost-300x185.jpg" alt="coopetitionpost" width="300" height="185" /></p>
<p><span><span>Ori Inbar, <a href="http://ogmento.com/" target="_blank">Ogmento </a>and Robert Rice, <a href="http://www.neogence.com/#/home" target="_blank">Neogence Enterprises</a>, both founders of the <a href="http://www.arconsortium.org/" target="_blank">AR Consortium</a>, made great efforts to set our young industry off on the right foot -Â  in theÂ  spirit of <a href="http://en.wikipedia.org/wiki/Coopetition" target="_blank">coopetition </a>(</span></span>a <a title="Neologism" href="http://en.wikipedia.org/wiki/Neologism">neologism</a> coined to describe <a title="Co-operation" href="http://en.wikipedia.org/wiki/Co-operation">cooperative</a> <a title="Competition" href="http://en.wikipedia.org/wiki/Competition">competition)</a><span><span>. See </span></span><a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">Curious Raven for </a><a href="http://curiousraven.squarespace.com/home/2009/10/23/ismar-09-observations-and-comments.html" target="_blank">Robert&#8217;s conference observations</a>, and <span><span><a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">Ori&#8217;s post on Games Alfresco</a> for more about </span></span>Mobile Augmented Reality at ISMAR 2009.Â  The Mobile Augmented Reality Workshops were driven by an indomitable spokesperson for the new AR industry, <a href="http://www.perey.com/" target="_blank">Christine Perey</a>.Â  Christine not only helped motivate discussion on the issue of oxygen to the system, i.e. business value, but also she was a very generous connector at the conference.</p>
<p><span><span><br />
</span></span></p>
<h3>What&#8217;s Next From Augmented Reality&#8217;s Top Chefs?</h3>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-7.15.58-PM.png"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-7.12.35-PM.png"><img class="alignnone size-medium wp-image-4692" title="Screen shot 2009-10-24 at 7.12.35 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-7.12.35-PM-300x196.png" alt="Screen shot 2009-10-24 at 7.12.35 PM" width="300" height="196" /></a><br />
</span></span></p>
<p>As Ori pointed out, <a href="http://www.imdb.com/name/nm0218033/" target="_blank">Kent Demaine</a>, <a href="http://www.ooo-ii.com/" target="_blank">oooii</a> (pic above is from the oooii web site), Minority report VFX designer was hanging out at ISMAR 2009 and he came to the panel I was on: &#8220;Augmented Reality in Sports,  Entertainment and Advertising.&#8221;  We chatted afterwards about instrumented environments and how this is such a key to developing interesting augmented experiences.  Also I mentioned how back in the day I was involved in some of the early development of motion control software.  And it was great to hear Kent say they were still finding motion control cool at <a href="http://www.ooo-ii.com/" target="_blank">oooii</a>.  As Ori notes, he is the &#8220;guy with the most enviable AR credentials in the world (the guy who designed VFX for minority report)<strong>,&#8221;</strong><strong> </strong>and <a href="http://www.ooo-ii.com/" target="_blank">oooii</a> is busy and hiring.</p>
<p>One of the highlights of the Arts, Media and Humanities track for me was meeting <a href="http://jarrellpair.com/" target="_blank">Jarrell Pair.</a> He really brought the best out in panelists with his well-tuned questions.  The recording of ISMAR was comprehensive and videos should be up next week.  I will post the slides on Ugotrade of my presentation:  &#8220;The Next Wave of AR: Shared Augmented Realities and Remix Culture.&#8221;</p>
<h3>&#8220;Mixed and Augmented Reality: &#8216;Scary and Wondrous&#8217;&#8221; &#8211; <a href="http://en.wikipedia.org/wiki/Vernor_Vinge" target="_blank">Vernor Vinge</a></h3>
<p><strong>&#8220;Imagine an environment where most physical objects know where they are, what they are, and can, (in principle) network with any other object. With this infrastructure, reality becomes its own database.Â  Multiple consensual virtual environments are possible, each oriented to the needs of its constituency.Â  If we also have open standards, then bottom-up social networks and even bottom up advertising become possible. Now imagine that in addition to sensors, many of these itsy-bitsy processors are equipped with effectors.Â  Then the physical world becomes much more like a software construct.Â  The possibilities are both scary and wondrous.&#8221;</strong> (<a href="http://en.wikipedia.org/wiki/Vernor_Vinge" target="_blank">Vernor Vinge</a> -Â  intro to ISMAR 2009)</p>
<p>Vernor Vinge&#8217;s short intro to ISMAR 2009 (which can be downloaded with the <a href="http://www.ismar09.org/" target="_blank">ISMAR 2009 schedule here)</a> captures the essence of the &#8220;Scary and Wondrous&#8221; dawn of the age of ubiquitous computing and mixed and augmented reality.Â  It is definitely worth a moment to download.Â  The future of augmented and mixed realities, as Vernor Vinge points out, is tied up in a &#8220;tension between centralized and distributed computing&#8221; that &#8220;will continue long into the future.&#8221; One ofÂ  my fascinations with Wave is that it offers a tantalizing opportunity to explore augmented reality in an open distributed architecture.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-12-at-2.40.39-PM.png"><img class="alignnone size-medium wp-image-4586" title="Screen shot 2009-10-12 at 2.40.39 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-12-at-2.40.39-PM-300x154.png" alt="Screen shot 2009-10-12 at 2.40.39 PM" width="300" height="154" /></a></p>
<p>At ISMAR, I talked with as many people as possible about the AR Wave project &#8211; <a href="../../2009/10/13/ar-wave-layers-and-channels-of-social-augmented-experiences/" target="_blank">see my post here for more about Wave enabled AR</a>.Â  Many people were very enthusiastic to join the AR wave and the only thing I really lacked was about 100 invites to hand out!</p>
<h3>&#8220;Everything, Everywhere &#8211; making visible the invisible&#8221;</h3>
<p>Some of the areas that I would have liked to see given more attention on at ISMAR were sensor networks, data curation, and user experience.Â  Not that these areas were entirely neglected with Pattie Maes, MIT as a keynote speaker, and Mark Billinghurst presenting on some fascinating work on social augmented experiences and user experience.Â  I highly recommend catching up on these and other ISMAR presentations when the videos go up.</p>
<p><a href="http://www1.cs.columbia.edu/~swhite/" target="_blank"><img class="alignnone size-medium wp-image-4716" title="Screen shot 2009-10-25 at 12.28.25 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-12.28.25-PM-300x57.png" alt="Screen shot 2009-10-25 at 12.28.25 PM" width="300" height="57" /></a></p>
<p>And, I was very happy to meet and talk to <a href="http://www1.cs.columbia.edu/~swhite/" target="_blank">Sean White</a> whose work at Columbia University is one of my inspirations (for more <a href="http://www1.cs.columbia.edu/~swhite/" target="_blank">about Sean&#8217;s work see here</a> or click image above):</p>
<p><strong>&#8220;the confluence of powerful connected mobile devices, advances in computer vision and sensing, and techniques such as augmented reality (AR) enables exciting new opportunities for interacting with this hidden network of dynamic information and shifts the locus of interaction from the desktop computer to the world around us&#8221;</strong></p>
<p>And I had several very interesting conversationsÂ  at ISMAR about developing social augmented experiences that connect us to a physical world that is becoming &#8220;much more like a software construct&#8221; (Vernor Vinge).Â  Dirk Groten, CTO of Layar mentioned a few interesting projects Layar has up their sleeves, including somethingÂ  Layar may be cooking up with <a href="http://www.roomwareproject.org/" target="_blank">The RoomWare Project.</a></p>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-10.03.00-PM.png"><img class="alignnone size-medium wp-image-4697" title="Screen shot 2009-10-24 at 10.03.00 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-10.03.00-PM-300x231.png" alt="Screen shot 2009-10-24 at 10.03.00 PM" width="300" height="231" /></a><br />
</span></span><br />
The picture above is of RoomWare&#8217;s Social RFID Installation for Media Plaza in Utrecht (<a href="http://blog.roomwareproject.org/2008/10/06/social-rfid-installation-for-media-plaza/">read more here</a>).</p>
<h3>Demos Galore!</h3>
<p>In the demo rooms,<a rel="cc:attributionURL" href="http://augmentation.wordpress.com/2009/10/24/ismar-ismar-ismar-where-to-start/"> Noah Zerkin</a> (pic below left) pretty much single-handedly carried the AR flag for a growing community of augmented reality Makers and Hackers.  But his presence was much appreciated, and he tirelessly demoed <a href="http://zerkinglove.com/" target="_blank">The Zerkin Glove.</a> See <a href="http://augmentation.wordpress.com/2009/10/24/ismar-ismar-ismar-where-to-start/" target="_blank">the first of what may be several posts from Noah on ISMAR here</a>.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/noah2post.jpg"><img class="alignnone size-medium wp-image-4700" title="noah2post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/noah2post-300x199.jpg" alt="noah2post" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/TishVuzixgogglespost.jpg"><img class="alignnone size-medium wp-image-4704" title="Tish&amp;Vuzixgogglespost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/TishVuzixgogglespost-300x199.jpg" alt="Tish&amp;Vuzixgogglespost" width="300" height="199" /></a></p>
<p>And I got to try out the Vuzix goggles (picture above on right).Â Â  This was my first experience playing an AR game that was smart about real world gravity. It&#8217;sÂ  &#8220;an <span>augmented reality</span> marble game that uses gravity as a <span>game controller</span>&#8221; &#8211; see <a href="http://gamesalfresco.com/2009/08/09/augmented-reality-has-gained-gravity/" target="_blank">Ori Inbar&#8217;s write up here</a>.Â  It was a very compelling experience and I have to say I didn&#8217;t really notice the shortcomings of the Vuzix goggles while I was absorbed in the game. AndÂ  I turned out to be quite good at the game too. It is intuitive unlike the kind ofÂ  rule based games I never have time to learn properly.Â  But what is so special about this project is the tools that it is built with are open, and available for all, and affordable (see this <a href="http://gamesalfresco.com/2009/08/09/augmented-reality-has-gained-gravity/" target="_blank">list on Games Alfresco</a>).</p>
<p>It was a great pleasure to meet <a href="http://www1.cs.columbia.edu/~feiner/" target="_blank">Prof. Steven Feiner</a> (picture on below the left) who heads Columbia University&#8217;s brilliant AR research team at <a href="http://graphics.cs.columbia.edu/top.html" target="_blank">The Columbia University Graphics and User Interfaces Lab.</a></p>
<p>Ori Inbar (pic below on right) also spent a lot of time in the demo room showing off Ogmento&#8217;s lovely AR learning game that delighted attendees, <a href="http://ogmento.com/"><strong>&#8220;Put a Spell: Learn to Spell with Augmented Reality.&#8221;</strong></a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/TishVuzixpost.jpg"><img class="alignnone size-medium wp-image-4703" title="TishVuzixpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/TishVuzixpost-199x300.jpg" alt="TishVuzixpost" width="199" height="300" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Ogmentopost.jpg"><img class="alignnone size-medium wp-image-4702" title="Ogmentopost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Ogmentopost-199x300.jpg" alt="Ogmentopost" width="199" height="300" /></a></p>
<p>For a round up ofÂ  what&#8217;s next for augmented reality head mounted displays check out, <a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">Games Alfresco here</a>, and Thomas Carpenter&#8217;s excellent review of the <a href="http://thomaskcarpenter.com/2009/10/21/ismar09-hmd-review/">head mounted displays.</a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/GeorgandBlairpost.jpg"><img class="alignnone size-medium wp-image-4712" title="GeorgandBlairpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/GeorgandBlairpost-300x199.jpg" alt="GeorgandBlairpost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/cypherpost.jpg"><img class="alignnone size-medium wp-image-4713" title="cypherpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/cypherpost-300x199.jpg" alt="cypherpost" width="300" height="199" /></a></p>
<p><strong>Ori Inbar on Games Alfresco asks is &#8220;Microsoft &#8211; the new big player to watch</strong>?&#8221;  &#8220;<a href="http://www.robots.ox.ac.uk/%7Egk/" target="_blank">Georg Klein</a>, inventor of <a href="http://www.youtube.com/watch?v=pBI5HwitBX4" target="_blank">PTAM-on-an-iPhone</a> (and the smartest Computer Vision guy on the block)&#8221; has joined Microsoft to make Mobile AR.</p>
<p>The picture on the left above shows Georg trying out <a href="http://www.youtube.com/watch?v=Cix3Ws2sOsU&amp;feature=player_embedded" target="_blank">ARhrrr</a> with Blair MacIntyre.Â Â  And on the right Blair is demoing his marker card pack to Senior Vice President of Cypher Entertainment, David Elmekies.Â  Yes ISMAR was abuzz with demos. See<a href="http://compscigail.blogspot.com/2009/10/ismar09-few-demos.html" target="_blank"> </a><a href="http://compscigail.blogspot.com/2009/10/ismar09-few-demos.html" target="_blank">this post</a> from Gail Carmichael for more video demos.</p>
<h3>Next Year ISMAR 2010 in Korea!</h3>
<p><span><span><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/ISMARBanquet.jpg"><img class="alignnone size-medium wp-image-4693" title="ISMARBanquet" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/ISMARBanquet-300x199.jpg" alt="ISMARBanquet" width="300" height="199" /></a></span></span></p>
<p><span style="font-weight: normal;"><span style="font-weight: bold;"><span style="font-size: 0.800001em;"> </span></span></span>At the banquet, I managed to find a seat at a table with Sean White (at left in photo above with Christine Perey to his right) and the Columbia University team.Â  The banquet culminated with the â€œPast and Future of ISMARâ€ Panel chaired valiantly by Jay Wright of Qualcomm.Â  We were asked to offer our input for ISMAR 2010.Â  I offered up an idea that I have been nurturing for a while now -Â  to stage a &#8220;Green Tech AR Competition.&#8221;Â  Perhaps, I suggested, we could <span id="zx-." title="Click to view full content">base the competition around a conference (ISMAR 2010 in Korea?) and set up a target rich, instrumented environment for the occassion.Â  I think the Arduino open hardware community and AR developers have a synergy that is just waiting to be explored!Â  And, if we add the innovators of data curation to the mix, e.g., Pachube, AMEE, and Path Intelligence&#8230;(Markus Tripp left ISMAR to speak on a <a href="http://www.web2summit.com/web2009" target="_blank">Web 2.0 Summit</a> panel, <a href="http://www.readwriteweb.com/archives/humans_as_sensors.php" target="_blank">&#8220;Humans as Sensors,&#8221;</a> which also included Path Intelligence, Deborah Estrin on <a href="http://research.cens.ucla.edu/people/estrin/" target="_blank">&#8220;participatory sensing,&#8221;</a> and the brilliant work of <a href="http://twitter.com/dianneisnor" target="_blank">Di-Ann Eisnor</a>, <a href="http://platial.com/" target="_blank">Platial</a>, on &#8220;Transactional Cartography&#8221;).Â  Anyway a big Green tech AR competition could get people working together across the broad spread of AR terrain on some of the sticky problems of user experience.Â  And, with a high level of support from Smart Phone companies, HMDs manufacturers and the chip makers we just might come up with some extraordinary magic.<br />
</span></p>
<p><span id="zx-." title="Click to view full content"> The devil of course will be in the details.Â  But a competition like this could not only motivate key players to come together in the spirit of coopetition but also be an opportunity to show the world the power of AR to make visible the invisible ecosystems that are so important to the health of our planet.<br />
</span></p>
<p>One of the notable presences at ISMAR 2009 was the Qualcomm team.Â Â  Jay Wright&#8217;s presentation (an exclusive for ISMAR) not only outlined AR for 2012, but Jay also talked about some &#8220;close to the metal&#8221; innovation that we will see from Qualcomm very, very soon!Â  I had some time in the press room with Jay and his team prompted by <a href="http://www.mobilemonday.nl/" target="_blank">MoMo&#8217;s </a>Yuri van Geest.Â  When I twittered about Qualcomm&#8217;s presentation at ISMAR, Yuri replied:<strong><br />
</strong></p>
<p><a href="http://twitter.com/vanGeest" target="_blank">vangeest</a> <a href="http://twitter.com/TishShute" target="_blank">&#8220;@tishshute</a>: good stuff, hopefully you will integrate the neat new solutions and ideas in your talk in November ;)&#8221;</p>
<p><strong> </strong>I will be presenting at <a href="http://www.mobilemonday.nl/" target="_blank">MoMo #13</a> on AR, open AR, future of AR and GeoWeb,Â  and hopefully will bring some good news from Qualcomm too.Â  Anyway Jay seemed to like the idea of a Green Tech AR Competition, even though I did stress that I thought it needed some serious sponsorship and BIG prizes.</p>
<p><strong><br />
</strong></p>
<h3>Where&#8217;s the beef? Tracking and Mapping at ISMAR 2009</h3>
<p>On the flight from NYC to Orlando and ISMAR &#8217;09 I dozed (I had been up late preparing my presentation) and I watched the Dew Tour Pro Skateboard competition and Top Chef on the Food Channel.  In this particular episode of Top Chef, the aspiring chefs were all given a brown bag of ingredients by an already famous chef who then judged whether the contenders managed to make a delicious meal with their allotment which was notably lacking in key ingredients of haute cuisine.</p>
<p>This metaphor ofÂ  trying to cook up a great meal while perhaps missing the staples is apt for the current early stage of commercial augmented reality.Â  And when I arrived in Orlando, not only were the Dew Tour pro skateboarders staying at the same hotel as ISMAR, but ISMAR itself felt remarkably like an Augmented Reality Top Chef Coopetition.</p>
<p>Much of ISMAR was dedicated to the task ofÂ  providing the meat and potatoes of Augmented Reality, solutions to mobile tracking, mapping and registration, particularly in the Science and Technology track.</p>
<p>Industrial and Military Augmented reality solutions I found out, typically, solve the tracking problems by using fixed mounts which clearly wouldn&#8217;t translate well into the AR everywhere with everything mobile consumer culture expects.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/DanielPustkapost.jpg"><img class="alignnone size-medium wp-image-4679" title="DanielPustkapost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/DanielPustkapost-300x199.jpg" alt="DanielPustkapost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-2.41.56-PM.png"><img class="alignnone size-medium wp-image-4726" title="Screen shot 2009-10-25 at 2.41.56 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-2.41.56-PM-300x208.png" alt="Screen shot 2009-10-25 at 2.41.56 PM" width="300" height="208" /></a></p>
<p><em>In the picture on the left Fabian Doil stands by the VW engine that provided some of the outdoor targets for the ISMAR tracking competition.Â  On the right is a picture from the VW&#8217;s presentation on their research and development of AR.</em></p>
<p>I followed the tracking contest, organized by Daniel Pustka and Fabian Doil of Volkswagen, quite closely. And I learned a lot in the process. WhileÂ  it is clear there has been progress in AR mapping and tracking, we still have a ways to go.</p>
<p>But hanging around the Tracking Competition was a good way to find out the state of play of this crucial part of the AR dream.Â  For example,Â  a little tidbit I learned is that <a href="http://www.gris.informatik.tu-darmstadt.de/~mgoesele/" target="_blank">Michael Goesele </a>who has been reconstructing &#8220;high-quality geometry models from images collected from the internet (so called community photo collections, CPC)&#8221; is soon to be at the <a href="http://www.ini-graphics.net/ini-graphicsnet/members/fraunhofer-institut-fuer-graphische-datenverarbeitung-igd.html" target="_blank">Institut Graphische Datenverarbeitung</a> where top contenders in the tracking contest &#8211; Harald WuestÂ  and Folker Weintipper (in the foreground of the photo at the left and right respectively) are also to be found. [update Harold and Folker were the winning team <a href="http://docs.google.com/gview?a=v&amp;pid=gmail&amp;attid=0.1&amp;thid=1248dd2927becb21&amp;mt=application%2Fpdf&amp;url=http%3A%2F%2Fmail.google.com%2Fmail%2F%3Fui%3D2%26ik%3De77cfddae9%26view%3Datt%26th%3D1248dd2927becb21%26attid%3D0.1%26disp%3Dattd%26zw&amp;sig=AHBy-hbcqUsaRNjbqpHO8vAF_vJqfDrMig" target="_blank">see here for details of scoring and results</a>!] Otto Korkalo and Tuomas Kantonen of VTT, Finland, Augmented Reality team are in the background. They have been working on the joint IBM, Nokia and VTT project that brings, <a href="http://www.marketwatch.com/story/researchers-from-ibm-nokia-and-vtt-bring-avatars-and-people-together-for-virtual-meetings-in-physical-spaces-2009-10-19" target="_blank">Avatars and People Together for Virtual Meetings in Physical Spaces.</a></p>
<p>The picture on the right is another team that were doing very well. If my notes serve me well (and please forgive me if they don&#8217;t. I came back with my card wallet overflowing!) the photo on the right shows Christian Waechter (on the left) and Peter Keitler (on the right) of the <a href="http://portal.mytum.de/welcome" target="_blank">Technische Universitat Munchen</a>.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/trackingcompetitionpost.jpg"><img class="alignnone size-medium wp-image-4672" title="trackingcompetitionpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/trackingcompetitionpost-300x199.jpg" alt="trackingcompetitionpost" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Trackingcompetition2post.jpg"><img class="alignnone size-medium wp-image-4681" title="Trackingcompetition2post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Trackingcompetition2post-300x199.jpg" alt="Trackingcompetition2post" width="300" height="199" /></a></p>
<p>Germany is certainly leading the way in industrial AR. And I learned how small businesses like Metaio get to work with top research institutions and big companies like VW, thanks to very strong German funding program for AR and VR. The current iteration of a series of funding programs isÂ  called<a href="http://www.avilus.de/" target="_blank"> Avilus</a>.Â  AvilusÂ  is putting 42 million Euros into AR and VR this year alone (click on the slide below to see more about Avilus ).</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-1.08.48-AM.png"><img title="Screen shot 2009-10-24 at 1.08.48 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-1.08.48-AM-300x212.png" alt="Screen shot 2009-10-24 at 1.08.48 AM" width="300" height="212" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-2.04.50-AM.png"><img class="alignnone size-medium wp-image-4673" title="Screen shot 2009-10-24 at 2.04.50 AM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-24-at-2.04.50-AM-300x202.png" alt="Screen shot 2009-10-24 at 2.04.50 AM" width="300" height="202" /></a></p>
<p>I wish we had the equivalent of Avilus here in the US.  But there is no equivalent to Avilus for AR here, and no AR is being developed by the US car industry either it seems.  But look at the slide above to get a taste of some of the cool stuff Metaio and other small AR and VR businesses do for VW through the Avilus project.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/VWtrackinggudrunpost.jpg"><img class="alignnone size-medium wp-image-4682" title="VWtrackinggudrunpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/VWtrackinggudrunpost-300x199.jpg" alt="VWtrackinggudrunpost" width="300" height="199" /></a></p>
<p>I also got to meet many people from one of the world&#8217;s most important AR hubs -Â  The Department of Informatics, <a href="http://portal.mytum.de/welcome" target="_blank">Technische Universitat Munchen</a>, including Prof. Gudren Klinker on the far right in pic above.Â  And from left to right, Fabian Doil (VW, co-organizer of contest), Sebastian Lieberknecht , Selim Ben Himane (Metaio), Tobias Eble (Metaio).Â  Prof. Klinker is the engine behind much of German innovation in AR.</p>
<p>Metaio was one of the few teams to rely mainly on markerless tracking which in this contest was very challenging because of the very different light conditions (see pics below) between the windowless interior and dazzling Florida sunshine outside (pic on the right shows targets under ideal lighting conditions).  Many people in the US may be familiar with Metaio&#8217;s consumer applications, like Junaio,  but thanks to Germany&#8217;s efforts to nurture augmented and virtual reality they are also respected software developers in industrial AR.  And I suspect that Metaio will spearhead markerless tracking in consumer AR too.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Trackingcompetition5post.jpg"><img class="alignnone size-medium wp-image-4740" title="Trackingcompetition5post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Trackingcompetition5post-300x199.jpg" alt="Trackingcompetition5post" width="300" height="199" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-7.47.44-PM.png"><img class="alignnone size-medium wp-image-4745" title="Screen shot 2009-10-25 at 7.47.44 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-7.47.44-PM-300x229.png" alt="Screen shot 2009-10-25 at 7.47.44 PM" width="300" height="229" /></a></p>
<p>This post as usual has already expanded to something much longer than I originally intended &#8211; pretty typical for me! There is much I have not been able to cover including some of the interesting contributions by augmented reality artists at ISMAR &#8211; again I recommend the upcoming videos.</p>
<p>But I cannot end without a hat tip to, Oriel, Nate et al. who won the best student paper award for AR Sketch &#8211; again please <a href="http://gamesalfresco.com/2009/10/23/ismar-2009-epilogue-a-new-augmented-reality-world-order/" target="_blank">see Games Alfresco for more on this</a> (pic below from Games Alfresco). AR Sketch, Ori notes, is featured &#8220;in our <a href="http://gamesalfresco.com/2009/10/16/ismar-2009-sketch-and-shape-recognition-preview-from-ben-gurion-university/" target="_self">top post</a> and popular <a href="http://www.youtube.com/watch?v=M4qZ0GLO5_A" target="_blank">video</a>.&#8221; And</p>
<p><strong>&#8220;Their work is revolutionizing the AR world by avoiding the need to print markers â€“ or any images whatsoever.&#8221;</strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-1.58.35-PM1.png"><img class="alignnone size-medium wp-image-4719" title="Screen shot 2009-10-25 at 1.58.35 PM" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/10/Screen-shot-2009-10-25-at-1.58.35-PM1-300x223.png" alt="Screen shot 2009-10-25 at 1.58.35 PM" width="300" height="223" /></a><br />
</strong></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/10/24/ismar-2009-an-augmented-reality-top-chef-coopetition/feed/</wfw:commentRss>
		<slash:comments>9</slash:comments>
		</item>
		<item>
		<title>Games, Goggles, and Going Hollywood&#8230;How AR is Changing the Entertainment Landscape: Talking with Brian Selzer, Ogmento</title>
		<link>http://www.ugotrade.com/2009/08/30/games-goggles-and-going-hollywood-how-ar-is-changing-the-entertainment-landscape-talking-with-brian-selzer-ogmento/</link>
		<comments>http://www.ugotrade.com/2009/08/30/games-goggles-and-going-hollywood-how-ar-is-changing-the-entertainment-landscape-talking-with-brian-selzer-ogmento/#comments</comments>
		<pubDate>Mon, 31 Aug 2009 03:38:38 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Android]]></category>
		<category><![CDATA[Artificial Intelligence]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[nanotechnology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Virtual Meters]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[alternate reality RPG]]></category>
		<category><![CDATA[ambient intelligence]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[AR Network]]></category>
		<category><![CDATA[AR spam]]></category>
		<category><![CDATA[ARBalloon]]></category>
		<category><![CDATA[ARN]]></category>
		<category><![CDATA[augmented reality baseball cards]]></category>
		<category><![CDATA[augmented reality development]]></category>
		<category><![CDATA[augmented reality eyewear]]></category>
		<category><![CDATA[augmented reality hotspots]]></category>
		<category><![CDATA[augmented reality industry]]></category>
		<category><![CDATA[augmented reality network]]></category>
		<category><![CDATA[augmented reality on the iphone]]></category>
		<category><![CDATA[augmented reality search]]></category>
		<category><![CDATA[augmented reality toys]]></category>
		<category><![CDATA[Blockade]]></category>
		<category><![CDATA[Brad Foxhoven]]></category>
		<category><![CDATA[Brian Selzer]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[Cyberpunk]]></category>
		<category><![CDATA[Evolutionary Reality]]></category>
		<category><![CDATA[EyeToy]]></category>
		<category><![CDATA[eyewear for AR]]></category>
		<category><![CDATA[Games Alfresco]]></category>
		<category><![CDATA[Green Tech AR]]></category>
		<category><![CDATA[jim purbrick]]></category>
		<category><![CDATA[Kensuke Tanabe]]></category>
		<category><![CDATA[Layar]]></category>
		<category><![CDATA[Layar Developer Conference]]></category>
		<category><![CDATA[location based RPGs]]></category>
		<category><![CDATA[Lumus]]></category>
		<category><![CDATA[markerless AR]]></category>
		<category><![CDATA[markerless mobile augmented reality]]></category>
		<category><![CDATA[markerless natural feature tracking]]></category>
		<category><![CDATA[Masunaga]]></category>
		<category><![CDATA[Metroid]]></category>
		<category><![CDATA[Metroid Prime]]></category>
		<category><![CDATA[Mirrorshades]]></category>
		<category><![CDATA[multiperson mobile AR experiences]]></category>
		<category><![CDATA[Nano Air Vehicles]]></category>
		<category><![CDATA[near field object recognition]]></category>
		<category><![CDATA[new augmented reality trade jargon]]></category>
		<category><![CDATA[Ogmento]]></category>
		<category><![CDATA[Ori Inbar]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[Pentagon's Robot Hummingbirds]]></category>
		<category><![CDATA[Project Natale]]></category>
		<category><![CDATA[Put a Spell]]></category>
		<category><![CDATA[Robert Rice]]></category>
		<category><![CDATA[Sekai camera]]></category>
		<category><![CDATA[social gaming platforms]]></category>
		<category><![CDATA[sticky light]]></category>
		<category><![CDATA[The Dawn of the Augmented Reality Industry]]></category>
		<category><![CDATA[Tonchidot]]></category>
		<category><![CDATA[Topps AR baseball cards]]></category>
		<category><![CDATA[Total Immersion]]></category>
		<category><![CDATA[Vuzix]]></category>
		<category><![CDATA[Wikitude]]></category>
		<category><![CDATA[Yoshio Sakamoto]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4334</guid>
		<description><![CDATA[Picture on the left Mirrorshades, picture on the right a Metroid Hud. &#8220;Augmented Reality is like a Philip K Dick novel torn off its paperback rack and blasted out of iPhones,&#8221; Bruce Sterling in Beyond the Beyond &#8220;a techno visionary dream come true &#8211; those are rare, really rare, you have to be patient,Â  it&#8217;s [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/mirrorshadespost3.jpg"><img class="alignnone size-full wp-image-4349" title="mirrorshadespost3" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/mirrorshadespost3.jpg" alt="mirrorshadespost3" width="124" height="204" /></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/metroid_hud1post2.jpg"><img class="alignnone size-medium wp-image-4350" title="metroid_hud1post" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/metroid_hud1post2-300x204.jpg" alt="metroid_hud1post" width="300" height="204" /></a></p>
<p><em>Picture on the left <a href="http://www.amazon.com/Mirrorshades-Cyberpunk-Anthology-Greg-Bear/dp/0441533825" target="_blank">Mirrorshades</a>, picture on the right a <a href="http://en.wikipedia.org/wiki/Metroid" target="_blank">Metroid Hud</a>.</em></p>
<p><strong>&#8220;Augmented Reality is like a Philip K Dick novel torn off its paperback rack and blasted out of iPhones,&#8221; <a href="http://www.wired.com/beyond_the_beyond/2009/08/the-key-take-aways-for-investors-interested-in-the-augmented-reality-field/" target="_blank">Bruce Sterling in Beyond the Beyond</a></strong></p>
<p><strong>&#8220;a techno visionary dream come true &#8211; those are rare, really rare, you have to be patient, it&#8217;s super cyberpunk&#8221;&#8230; Bruce Sterling, <a href="http://vimeo.com/6189763" target="_blank">&#8220;At the Dawn of the Augmented Reality Industry.&#8221; </a></strong></p>
<p>The Dawn of the Augmented Reality Industry continues to brighten, and now we have two augmented reality companies, <a href="http://www.t-immersion.com/" target="_blank">Total Immersion</a> and <a href="http://ogmento.com/" target="_blank">Ogmento</a>, firmly established in Hollywood &#8211; the dream mother of so many of our augmented realities.<a href="http://ogmento.com/" target="_blank"></a></p>
<p><a href="http://ogmento.com/" target="_blank">Ogmento</a> is the most recent of these two pioneering augmented reality companies to set up shop in LA.Â  <a href="http://www.t-immersion.com/" target="_blank">Total Immersion&#8217;s</a> CEO Bruno Uzzan moved to LA from France two years ago, although he still has a fifty person RandD team in France.Â Â  Total Immersion began 10 years ago in the quiet, lonely, hours before the dawn of an AR industry.Â  But <a href="http://gamesalfresco.com/2009/07/23/mattel-launches-augmented-toys-at-comic-con/" target="_blank">Total Immersion&#8217;s AR toys for Mattel,</a> and augmented reality for <a href="http://www.youtube.com/watch?v=I7jm-AsY0lU" target="_blank">Topps baseball cards</a>, fired CNet writer Daniel Terdiman up enough to say, &#8220;I have seen the future of toys, and it is augmented reality&#8221; (<a href="http://news.cnet.com/8301-13772_3-10317117-52.html" target="_blank">see full post here on CNet</a>).</p>
<p>Recently, I talked withÂ <a href="http://www.ugotrade.com/2009/07/28/augmented-realitys-growth-is-exponential-ogmento-reality-reinvented-talking-with-ori-inbar/" target="_blank"> Ori Inbar, one of the founders of Ogmento </a>andÂ  the premier augmented reality blog <a href="http://gamesalfresco.com/" target="_blank">Games Alfresco</a> about his new venture in Hollywood. Bruce Sterling, <a href="http://twitter.com/bruces" target="_blank">@bruces</a>, had some fun with my invention of <a href="http://www.wired.com/beyond_the_beyond/2009/08/augmented-reality-ogmento/" target="_blank">brand new augmented reality trade jargon here</a>!Â  Ori pointed out Ogmento brings two important new facets to the rapidly growing augmented reality field: firstly they are bringing leadership from veterans of the entertainment industry into augmented reality development. <a id="squu" title="Brad Foxhoven" href="http://www.blockade.com.nyud.net:8080/about/about-blockade" target="_blank">Brad Foxhoven</a> and <a id="odvk" title="Brian Seizer" href="http://brianselzer.com/">Brian Selzer</a> from <a id="xow_" title="Blockade" href="http://www.blockade.com/" target="_blank">Blockade</a> have partnered with Ori on Ogmento.Â  And, in an another important step forward for a young industry, Ogmento announced they will be acting as publishers for a fast growing cohort of augmented reality application developers and helping AR development teams out there bring their concepts to the market.</p>
<p>So I was very happy also to have the opportunity to talk with Brian Selzer.Â  Bruce Sterling pointed out in his seminal<a href="http://eurekadejavu.blogspot.com/2009/08/augmented-realitys-sermon-on-flatlands.html" target="_blank"> sermon from the flatlands</a> at the <a href="http://layar.com/" target="_blank">Layar</a> Developer Conference, AR is kind of a &#8220;Hollywood scene.&#8221; We have seen the web early adopter/developer/blogger communityÂ  embrace augmented reality browser experiences in recent weeks in an awesome wave of enthusiasm. Are Hollywood creatives equally smitten? For the answers see the full interview with Brian Selzer below.</p>
<p>Brian Selzer (<a href="http://brianselzer.com/" target="_blank">www.brianselzer.com</a> and <a href="http://twitter.com/brianse7en" target="_blank">twitter &#8211; brianse7en</a> ) has an extensive involvement with emerging platforms:</p>
<p><strong>&#8220;from launching dot com entertainment sites in the late 90&#8242;s to creating early versions of social gaming platforms, or bringing big brands like Spider-Man and X-Men into the mobile space for the first time. Â Last year I was focused on bringing video game characters and worlds into the online space as UGC [user generated content] projects (<a href="http://www.mashade.com/" target="_blank">mashade.com</a>, <a href="http://www.instafilms.com/" target="_blank">instafilms.com</a>).&#8221;</strong></p>
<p>I began my own career in Hollywood doing motion control photography and creating software that bridged the language of robotics and servo motors with the visions ofÂ  film directors. Eventually our little company, NPlus1, moved on to 3D vision systems and image recognition stuff.Â  So yes, I have been really, really patient waiting for this particular techno visionary dream.Â  And, while I have been waiting for augmented reality to manifest, I have grown to love the internet.Â  But now, how awesome, <a href="../../2009/01/17/is-it-%E2%80%9Comg-finally%E2%80%9D-for-augmented-reality-interview-with-robert-rice/" target="_blank">It is OMG finally for mobile AR!</a></p>
<p>Augmented reality is busting out all over &#8211; through our laptops, our phones, on the streets, toys, baseball cards, art installations, <a href="http://www.youtube.com/watch?v=9noMfsg486Y" target="_blank">sticky light calligraphy</a> and more.</p>
<p>Many of my questions to Brian were directed at how and when we will see augmented realities with near field object recognition, image recognition and tracking and, of course, the elusive eyewear. As Bruce Sterling points out we are just at the very, very beginning &#8211; the dawn of an industry. I created the photomontage below on the right to complement <em> <a href="http://www.tonchidot.com/">Tonchidot&#8217;s</a> </em>illustration suggesting the evolutionary inevitability of holding our phones up (below on the left). The Evolutionary Reality of AR will not end there. It is just a step into eyewear, hummingbirds or <a href="http://gizmodo.com/5306679/pentagons-robot-hummingbird-christened-nano-air-vehicle" target="_blank">Nano Air Vehicles</a>, and more&#8230;&#8230;.</p>
<h3>The Evolutionary Reality of AR</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/Picture-96.png"><img class="alignnone size-medium wp-image-4359" title="Picture 96" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/Picture-96-300x97.png" alt="Picture 96" width="300" height="97" /></a></p>
<p><em>Cartoon on the left by <a href="http://www.tonchidot.com/">Tonchidot</a> on the right a collage of a stock photo and the <a href="http://gizmodo.com/5306679/pentagons-robot-hummingbird-christened-nano-air-vehicle" target="_blank">Pentagon&#8217;s Robot Humming Birds &#8211; </a><a href="http://gizmodo.com/5306679/pentagons-robot-hummingbird-christened-nano-air-vehicle" target="_blank">&#8220;Nano Air Vehicles</a>.&#8221;</em><strong><em><strong><a href="http://gizmodo.com/5306679/pentagons-robot-hummingbird-christened-nano-air-vehicle" target="_blank"> </a></strong></em> </strong></p>
<p>We finally have an affordable mediating device with the horsepower, mindshare and business model to bring AR mainstream with the iphone. The much anticipated Apple 3.1 Beta SDK to be released in September will not, I am sure, open up the Video API at the levels that augmented realities with near field object recognition and tracking require (I would love to be proved wrong though). But the magic wand to deliver even <span id="b9-2" title="Click to view full content">tightly registered AR graphics/media (that require a lot of CPU and GPU)</span> to a wide audience is in our hands, so full access may not be far off. And others, of course, can/will/might knock the iphone off its current pedestal. AR made its mobile phone debut on the Android after all.</p>
<p>Like everyone else who loves AR, I wish that Apple would open up faster (and I wish Android would manifest on some rocking hardware). But we will see enough of the iphone Video API open for the next generation of mobile augmented reality games and applications to emerge in the coming months.</p>
<p>One of these will be Ogmento&#8217;s.  Although Ogmento is in stealth mode, they have released <a href="http://www.youtube.com/watch?v=EB45O7-6Xrg&amp;eurl=http%3A%2F%2Fogmento.com%2F&amp;feature=player_embedded" target="_blank">a teaser for their first game, &#8220;Put A Spell,&#8221;</a> developed by ARBalloon &#8211; screenshot below. Ori did reveal to me in <a href="../../2009/07/28/augmented-realitys-growth-is-exponential-ogmento-reality-reinvented-talking-with-ori-inbar/" target="_blank">th<span style="color: #551a8b;">is interview</span></a> that they are doing image recognition and using the Imagination AR engine.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/Picture-95.png"><img class="alignnone size-medium wp-image-4356" title="Picture 95" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/Picture-95-300x177.png" alt="Picture 95" width="300" height="177" /></a></p>
<p>As Brian notes, Hollywood has had the AR bug for a long time. AR has been everywhere in Science Fiction Movies and video games. Nintendo&#8217;s SPD3 head Kensuke Tanabe, &#8220;effectively the man in charge of overseeing all the <em>Metroid</em> franchise underneath original co-creator Yoshio Sakamoto,&#8221; explains the story of <em>Metroid</em> to Brandon Boyer of <a href="http://www.offworld.com/2009/08/retro-effect-a-day-in-the-stud.html" target="_blank">Offworld here</a> (an image of a Metroid Hud on the right opening this post) :</p>
<p><strong>&#8220;the idea of the different visors you use in the <em>Prime</em> games to interact with the world: the scan visor, for instance, set the game apart from other first person shooters in that the player was using it to proactively collect information from the world, rather than having the story come to them passively, in the form of cut-scenes or narration. &#8220;<em>Prime</em> could have adventure elements with the introduction of this visor,&#8221; says Tanabe, &#8220;That&#8217;s how we came up with the genre &#8212; first person adventure, instead of shooter.&#8221;</strong></p>
<p>But as Brian points out:</p>
<p><strong>&#8220;the light bulb has been lit and Hollywood is seeing that the software and hardware are here today to deliver these types of AR experiences in real life (to a lesser extent of course, but the path is getting clear).&#8221;</strong></p>
<p><strong><br />
</strong></p>
<h3>Talking with Brian Selzer</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/me.jpg"><img class="alignnone size-full wp-image-4363" title="me" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/me.jpg" alt="me" width="188" height="227" /></a></p>
<p><strong>Tish Shute: </strong>Bruce Sterling&#8217;s sermon at the Layar Developer conference, <a href="http://www.wired.com/beyond_the_beyond/2009/08/at-the-dawn-of-the-augmented-reality-industry/" target="_blank">&#8220;At the Dawn of the Augmented Reality Industry,&#8221;</a> was absolutely awesome. He spread the future feast/orgy of augmented reality before us &#8211; and described many of the dishes we will be tasting, both delectable and diabolical. One of the many things he points out is, AR is kind of a &#8220;Hollywood scene.&#8221; And, as Ogmento is one of only two augmented reality companies in Hollywood, I am interested to hear how it looks from your neck of the woods. We have seen the web early adopter/developer/blogger community embrace augmented reality browsers in recent weeks in an awesome wave of enthusiasm &#8211; are Hollywood creatives catching the buzz?</p>
<p><strong>Brian Selzer: Â It was a thrill to hear Bruce Sterling mention Ogmento. I devoured all of his Cyberpunk books back in the 80&#8242;s, along with writers like Gibson, Rucker, Shirley&#8230; To me, sci-fi writers are the visionaries who define and influence our technological paths into the future. They make science and tech sexy enough to want to manifest those experiences in the real world. Clearly Bruce sees the AR industry as being sexy. I love that he called it &#8220;a techno-visionary dream come true&#8230; and super-cyberpunk.&#8221; Â And yes, kind of a Hollywood scene.</strong></p>
<p><strong>Hollywood creatives caught the AR bug before they knew what AR was. Â Look at science fiction movies and video games to see AR everywhere. Terminator, The Matrix, Minority Report, Iron Man.. the list goes on. Â Look at any video game with an integrated heads-up display. Â It&#8217;s clear Hollywood loves AR. Â It&#8217;s only been in the past few months though that the light bulb has been lit and Hollywood is seeing that the software and hardware are here today to deliver these types of AR experiences in real life (to a lesser extent of course, but the path is getting clear). So yes, the buzz is here and it&#8217;s strong. Â With that, we all have to be prepared for the good, the bad and the ugly as AR goes mainstream.</strong></p>
<p><strong>It certainly goes to show how young this industry is when Ogmento and Total Immersion are currently the only AR companies based in Los Angeles. It&#8217;s very exciting to be the only company right now demonstrating a natural feature tracking (markerless) iPhone experience in Hollywood. We are in talks to bring some very big brand and properties to the mobile AR space. The goal is to deliver experiences that create added engagement and value to the consumer.</strong></p>
<p><strong>Tish Shute:</strong> Also in his landmark sermon Bruce Sterling noted that augmented reality has been around for 17 yrs and now at last we are seeing the dawning ofÂ  an augmented reality industry. What inspired you to take up the challenge of launching an augmented reality company in Hollywood?Â  Oh congrats that Bruce Sterling name checked Ogmento in his list of companies that prove that this really is the dawn of an industry!</p>
<p><strong>Brian Selzer: I&#8217;ve always been involved in emerging platforms&#8230; from launching dot com entertainment sites in the late 90&#8242;s to creating early versions of social gaming platforms, or bringing big brands like Spider-Man and X-Men into the mobile space for the first time. Â Last year I was focused on bringing video game characters and worlds into the online space as UGC projects (mashade.com, instafilms.com). Working with all these great CG game assets, I continued to think about what&#8217;s next, and that&#8217;s when I started to follow AR very closely and started engaging with those who were pioneering in the space.</strong></p>
<p><strong>I remember swapping instant messages with <a href="http://curiousraven.squarespace.com/" target="_blank">Robert Rice</a> (<a href="http://twitter.com/robertrice" target="_blank">@robertrice</a>) right after the 2008 Super Bowl.Â  We were not chatting about the football game, but rather about some of the commercials that aired during the event as a sign that AR was making its way into the mainstream.Â  A lot of people became aware of AR for the first time when the <a href="http://ge.ecomagination.com/smartgrid/" target="_blank">GE SmartGrid commercial</a> aired.Â  There were all these YouTube videos popping up of people blowing on holographic wind turbines.</strong></p>
<p><strong>The commercial that really got me excited though was the <a href="http://www.youtube.com/watch?v=Kwke0LNardc" target="_blank">Coke Avatar commercial</a>. In that commercial people in the city were sporadically being portrayed as their digital personas, avatars, gaming characters, etc. For me that spot did a great job showing how many of us already have these &#8216;alter egos&#8217; that live in cyberspace, and how the line between these worlds can sometimes be blurred. I remember watching that commercial and thinking that is exactly the type of experience I&#8217;d like to create with mobile AR. I want to overlap the virtual world into our every-day reality. Why can&#8217;t I bring my World of Warcraft or Second Life persona with me into the real world?</strong></p>
<p><strong>I am big on the notion of &#8220;Games and Goals.&#8221; I believe that games have the power to motivate people in a very powerful way. By challenging ourselves while playing a game we can climb mountains. Augmented Reality is the perfect platform to bring gaming into the real world. By mixing the virtual world with the physical world, this added layer of perception provides a very powerful experience for something like a role-playing game.</strong></p>
<p><strong>One of my earlier social-gaming projects was a website called Superdudes. This was a &#8220;Be Your Own Superhero&#8221; concept that celebrated and motivated kids to create superhero avatar/personas online, and we gave members all sorts of games, challenges, and rewards, some of which carried into the real world. The site recognized members for teamwork, creativity, volunteer work and things like that. So the Superdudes were often involved in charity events and benefits to help children. Everybody called each other by their Superhero names, and the line between fantasy and reality was being blurred. This project really got me thinking about what happens when you take positive role-playing like this and mix it into the real world. I started to work on a plan for location-based activist missions for points and rewards, but never got to complete that. So I have some unfinished business here.</strong></p>
<p><strong>I think it would be fantastic to be able to show up to some type of fun event with friends, and everybody could see each other&#8217;s alter ego personas standing before them. When you can turn the world into a playground, and use the power of gaming to make a positive impact on the planet&#8230; well, I don&#8217;t think there is anything better than that. These are the types of projects that drive me, and I think AR is the best platform to support these types of social gaming experiences.</strong></p>
<p><strong>Tish:</strong> Does Ogmento have any RPGs under development? I noticed in the Google Wave on RPG someone has been working on doing something with the Dungeons&amp;Dragons API. I am interested in exploring the web of protocols underlying Wave as a transport mechanism for multi-person, mobile, AR experiences (not requiring downloads), on an open global outdoor AR network. If not Wave, what do you see as the potential infrastructure and protocols we could harness for an open augmented reality network?</p>
<p><strong>Brian: Â Ogmento has a deep background in video games and we interact regularly with most of the major game publishers. As a company we are not so much developing our own RPGs right now, but rather exploring what mobile AR extensions make sense for existing brands. Â There are many limitations to location-based gaming, but a global AR network is exactly along the lines we are thinking. Â Lots of discussions are taking place on protocols, platforms, API&#8217;s, and there are numerous ways to approach this. Â We need to be able to use what&#8217;s available now and continue to refine and customize for AR&#8217;s specific needs and issues as we progress. </strong></p>
<p><strong>In general though, Ogmento is focused on what types of experiences can be had today and over the next couple of years. I still think we are several years out from a truly open augmented reality network. Â We are certainly looking at launching our own &#8220;Ogmented Network&#8221; which would support some fun treasure hunt type experiences, or add an entertainment layer on top of traditional outdoor marketing campaigns.</strong></p>
<p><strong>Tish:</strong> I don&#8217;t know whether you have read Thomas Wrobel&#8217;s ideas for an open augmented reality network that I just <a href="http://www.ugotrade.com/2009/08/19/everything-everywhere-thomas-wrobels-proposal-for-an-open-augmented-reality-network/" target="_blank">published here on Ugotrade</a>. The principles he talks about are very important for augmented reality to become a major part of our lives. Considering the difficulty open networks can pose for emerging business models, how can we fund the development of an open framework for augmented reality?</p>
<p>&#8220;<em>a future AR Network, I mean one as universal and as standard as the internet. One where people can connect from any number of devices, and without additional downloads, experience the majority of the content.<br />
Where people can just point their phone, webcam, or pair of AR glasses anywhere where a virtual object should be, and they will see it. The user experience is seamless, AR comes to them without them needing to &#8220;prepare&#8221; their device for it.&#8221;</em></p>
<p><strong>Brian: I think funding for these types of projects will definitely come from Venture Capital groups in the near future. Â It&#8217;s early in AR, but the VC&#8217;s are watching and deciding which horses to bet on. Â Until that time, it&#8217;s about service work, and developing AR experiences for others with what is possible today. That work will help fund internal development of original AR products, and platform development.</strong></p>
<p><strong>Tish:</strong> How did you get started with Ogmento?</p>
<p><strong>Brian: My first conversation with Ori was actually about my interest in Location Based RPG concepts. We had a long conversation about the possibilities with AR, and it was clear that we shared similar interests, but were coming from different, complementary backgrounds. The idea of collaboration was exciting, so we just kept talking until the timing felt right. Now, with Ogmento we bring a unique blend of AR development experience with deep backgrounds in AR technology, animation, video games, entertainment, social media, etc. I think this is a powerful mix that will allow us to do some great things.</strong></p>
<p><strong>It&#8217;s still so early, and things are just getting started in AR. There are only so many webcam magic tricks you can enjoy before you are ready for something else. The location-based apps have the most potential in my opinion, which is why we are really focused on mobile AR. We have some board-game type projects, which do not instantly scream location-based gaming, but if you look at something like the ARhrrr board game, you can see how much more compelling it can be when the game invites the player to be actively moving around during the experience.</strong></p>
<p><strong>Tish:</strong> I am interested in your perspective on how we can create the kind of AR experiences that really embody what has always been so exciting about AR &#8211; the tight alignment of graphics and media with real world objects and ultimately a rich immersive 3D experience, so I am going to hit you with a bunch of those, &#8220;Is this really eyewear or vaporware?&#8221; questions. The real deal eyewear changes everything!</p>
<p>While eyewear is a big challenge technically and aesthetically, I am pretty sure that there are several outfits out there that can pull off the optics and projection. Will the entertainment industry get excited enough to put a major push into delivering the eyewear in short order instead of the 5 to 10 year project that some people still think it is? The business development challenge is perhaps bigger than the technical obstacles? What is your view on this?</p>
<p>And, perhaps, the eyewear is a clear example of a need for partnerships. For example, we have seen efforts from companies like <a href="http://www.vuzix.com/home/index.html" target="_blank">Vuzix</a> and <a href="http://www.lumus-optical.com/" target="_blank">Lumus</a>, and recently a Japanese Company, <a href="http://www.masunaga1905.jp/brand/teleglass/">Masunaga</a>.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/Picture-97.png"><img class="alignnone size-medium wp-image-4386" title="Picture 97" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/Picture-97-300x80.png" alt="Picture 97" width="300" height="80" /></a></p>
<p>I have no reports from people who have tried the Masunaga eyewear yet. But, limited by a small field of view, and tethered, current eyewear offerings, available at a reasonable price point, are not workable solutions for augmented reality experiences. But the problems are not insurmountable. What will facilitate the real deal? It seems that it is critical to start creating hardware relationships now. The industry is costly and slow moving and as Robert Rice put it to me in a recent conversation, &#8220;once the software cat is out of the bag, it&#8217;s going to go wild and if the hardware isn&#8217;t there, it&#8217;s going to stutter.&#8221;</p>
<p>As Ori notes some of the hardware companies like Intel and others don&#8217;t seem to be paying enough attention to AR. Ori points out they don&#8217;t see the demand yet. But in order to create an awesome AR experience and demand from a mass audience, don&#8217;t we need to work in conjunction with hardware designers?</p>
<p><strong>Brian: It&#8217;s fun to think about who will eventually deliver a great hardware solution for AR glasses. It will happen. It would be cool to see somebody like an Oakley or Nike partnered up with a company like Vuzix to deliver something people actually might wear in public. Perhaps a hardware manufacturer like Apple or Nokia will bring us something like the iSight or the NGaze down the line. I&#8217;d love to see a set of glasses designed by Ideo. Microsoft or Sony are already playing with technologies like Project Natal and the EyeToy, so I think it&#8217;s only a matter of time before they deliver an eyewear solution. I would even look to the toy companies to eventually make an investment here.</strong></p>
<p><strong>Gamers will be the early adopters, and in a few years we may start to see people running around in the park wearing glasses with headsets, but it will be acceptable because it&#8217;s clear they are using them for a game. Â Itâ€™s going to take a very sexy and stylish piece of hardware for everyday people to be willing to wear AR glasses in public while going about their everyday business. Â Â Itâ€™s like the recent cover of Wired magazine where Brad Pitt is wearing a mobile headset in his ear, and the editors point out that even he canâ€™t pull that look off, so why do you think you can. Â When AR glasses come in designer frames, and you can&#8217;t tell them from non-AR glasses, to me thatâ€™s when things get really interesting from a mass-adoption perspective. Â Â Compare how many people were carrying around a mobile phone in the 80s to now.Â  I think it will be the same thing with glasses.</strong></p>
<p><strong>I was in an AR pitch meeting the other week at a very significant media company, and brought up the point that today&#8217;s handheld Smartphones will eventually evolve into tomorrow&#8217;s Smartglasses. My comment was quickly shrugged off as sort of a sci-fi notion that was irrelevant to the business at hand. Probably true, but I think it is important to understand where digital media and entertainment is going, so you can adapt quickly, and evolve into those spaces more naturally. The more we see people walking around with their Smartphones in front of their face (like a camera), the sooner it will be that we make the jump to eyeglasses as a key hardware device for AR experiences.</strong></p>
<p><strong>At Ogmento, we definitely are working on AR experiences with the hardware and software available today. Â We will get some product out this year, and 2010 will be a banner year for markerless mobile AR in general.Â  I think the entire AR community is looking forward to bringing this technology to the mainstream in the form of games, marketing campaigns, virtual docent apps, and much more.Â  It might not be the full experience we are all dreaming about for some time, but we can see the path and the true potential, and it&#8217;s pretty spectacular.</strong></p>
<p><strong>You mention the tight alignment of graphics and media with real world objects. Â That is really our focus. A lot of well-deserved attention is going to the browser overlay &#8220;post-it&#8221; approach right now, which uses compass and GPS. Â We are focused on markerless natural feature tracking, so once you identify something that is AR enhanced in your environment, you can interact with that integrated experience. Â On an iPhone that can be as simple as using your touch screen to interact. Â When you are wearing glasses, it becomes more about visual tracking. There are lots of smart people thinking through these issues. Many of which you have interviewed. It is my hope that there are exciting collaborative efforts to be had in the coming months to get us all there together and faster.</strong></p>
<p><strong>Tish:</strong> Bruce touched on some of the hard problems that have to be solved for augmented reality &#8211; and he noted for instance security needs to be tackled in the early stages. Robert made a nice list, <em>&#8220;privacy, media persistence, spam, creating UI conventions, security, tagging and annotation standards, contextual search, intelligent agents, seamless integration and access of external sensors or data sources, telecom fragmentation, privilege and trust systems, and a variety of others.&#8221;</em> Will Ogmento be leading the way in solving some of these hard problems?</p>
<p>And, won&#8217;t trying to solve these hard problems for networked AR in walled garden scenarios one company at a time lead to a lot of reinventing the wheel wasted energy?</p>
<p><strong>Brian: These are all important issues, and again there are a lot of smart people thinking about solutions to these problems on a daily basis. Â Ogmento is interested in partnering with developers and supporting their efforts as a publisher of mobile AR experiences. Â While we intend to roll up our sleeves in these areas, we are currently more focused on taking AR mainstream with the hardware and software available today. Â As the industry evolves, so will Ogmento. As the opportunities evolve, our ability to make a greater impact tackling these issues will be realized.</strong></p>
<p><strong>Tish: </strong>Another area of development that could really kick AR into high gear might be creating augmented reality hotspots where we can deliver the kind of location accuracy/instrumentation necessary to create interesting AR experiences (partnership with Starbucks, perhaps ?!). Augmented reality hotspots could deliver the kind of high quality AR experience that isn&#8217;t possible ubiquitously at the moment, and may be a real way to get people really exploring the potential of AR now, rather than later?</p>
<p><strong>Brian: Â Agreed. I see a great opportunity here with this approach.</strong></p>
<p><strong>Tish:</strong> Although there are many obstacles to Green AR &#8211; the energy hogging servers at the backend for starters! Last week I had a conversation with Gavin Starks, <a href="http://www.amee.com/?page_id=289" target="_blank">AMEE</a>, and <a href="http://curiousraven.squarespace.com/" target="_blank">Robert Rice </a>and <a href="http://jimpurbrick.com/" target="_blank">Jim Purbrick</a> about how to work with AMEE and the technology available and encourage Green Tech AR development (<a href="http://blog.pachube.com/2009/06/pachube-augmented-reality-demo-with.html" target="_blank">see an early exploration of green tech AR from Pachube here</a>).</p>
<p>We came up with the idea of holding a competition perhaps centered around a targeted instrumented space. But I would really love to hear your thoughts on the topic of Green Tech AR (the energy hogging servers at the back end being the first cloud on the horizon!.)Â  Cool GreenTech AR imaginings, social gaming ideas, RPGs, not even necessarily even tied to the immediately practical, would be like rain in a drought!</p>
<p><strong>Brian: Â I go back to &#8220;Games and Goals&#8221;&#8230; If you make environmental and other activist efforts fun and rewarding, more are likely to be motivated and participate. Â Can you imagine having a personal &#8220;carbon footprint stat&#8221; floating over your self at all times? Or over your home or factory? Â How would that change your behavior? Â We all love stats. Look at how the Nike+ campaign has used technology and gaming to motivate people to run. Â I think there is a lot that can be done to make being green fun. It starts with the individual, and spreads from there. Â Keep me posted on that one!</strong></p>
<p><strong>Tish:</strong> I would also like to explore further the <a href="http://www.readwriteweb.com/archives/augmented_reality_human_interface_for_ambient_intelligence.php" target="_blank">RRW suggestion that ambient intelligence is both the Holy Grail of AR and possibly snake oil</a>:</p>
<p><em>&#8220;The holy grail of the mobile AR industry is to find a way to deliver the right information to a user before the user needs it, and without the user having to search for it. This holy grail is likely in a ditch somewhere beside a well-traveled road in the district of the semantic Web, ambient intelligence and the Internet of things. Be wary of any hyped-up invitation to invest in a company that claims to have gotten the opportunity right. What we&#8217;ve seen in the commercial industry to date is a rather complex version of a keyboard, mouse, and monitor.&#8221;</em></p>
<p><em> </em></p>
<p>So Holy Grail, Snake Oil, or a ditch somewhere&#8230;.?</p>
<p><strong>Brian: I instantly think of Minority Report, where Tom Cruise&#8217;s character is being bombarded with holographic ads personalized with his name and to his current situation. In the future, Spam is a nightmare, especially when it knows who you are. I think the key thing here is delivering &#8220;the right information&#8221;, and we still don&#8217;t have that down. I do see a day where we can truly customize what comes to us, how we want it, when we want it. My future vision of ambient intelligence is the ability to &#8220;turn everything off&#8221; if I want to&#8230; block out the stimuli and replace it with images of nature, or natural surroundings, etc. Where I live in Los Angeles, we have those digital billboards everywhere, so it&#8217;s like advertising overload wherever you look (hints of Blade Runner). I personally don&#8217;t mind them, but I know there is great debate on there being simply too many billboards everywhere. So AR would only add to the noise of life by adding yet another digital overlay of information, right? </strong></p>
<p><strong>Perhaps the holy grail is to use technology to filter things out. AR might become a solution to leading a simpler life, or a perfectly customized life if you want that. Ultimately the control needs to be with the individual. Â I guess I am talking about something like TiVo taken to the extreme.</strong></p>
<p><strong>Tish:</strong> And then that other biggy &#8211; augmented reality search! I am asking this next question ofÂ  <a href="http://www.wikitude.org/" target="_blank">Wikitude</a> and <a href="http://sekaicamera.com/" target="_blank">Sekai </a>camera too and now I must also ask <a href="http://www.acrossair.com/" target="_blank">Acrossair</a> and several others I guess! Obviously a huge area of opportunity in this broader landscape that uses location-awareness, barcode scanners, image recognition and augmented reality is to harness the collective intelligence &#8211; a whole new field of search. There is the beginning of a discussion on this <a href="http://www.ugotrade.com/2009/08/19/everything-everywhere-thomas-wrobels-proposal-for-an-open-augmented-reality-network/" target="_blank">in the comments here</a>.</p>
<p>What will it take, in your view, to become a leader in augmented reality search?</p>
<p><strong>Brian: Â I&#8217;m more of a content guy, so I tend to focus on things like UI, quality of creative, etc.. Â From that perspective, I am looking forward to evolving beyond the &#8220;post-it&#8221; text overlay user-experience we see now in AR search. I was impressed with the TAT Augmented ID concept and hope we start seeing more smart design solutions like that emerging in the space. Â There are some great new design approaches coming out of the location-aware space that should be applied to AR search. I&#8217;ve been studying the heads-up display designs being used in video games, and re-watching movies like Iron Man for ideas. This is another example where Hollywood has painted a polished picture of what AR can and should look like, and the masses have already accepted these design approaches. Â So from that perspective, from my view the leaders in search will be delivering sexy, smart and simple solutions. It&#8217;s all about the S&#8217;s.</strong></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/08/30/games-goggles-and-going-hollywood-how-ar-is-changing-the-entertainment-landscape-talking-with-brian-selzer-ogmento/feed/</wfw:commentRss>
		<slash:comments>7</slash:comments>
		</item>
		<item>
		<title>Augmented Reality &#8211; Bigger than the Web: Second Interview with Robert Rice from Neogence Enterprises</title>
		<link>http://www.ugotrade.com/2009/08/03/augmented-reality-bigger-than-the-web-second-interview-with-robert-rice-from-neogence-enterprises/</link>
		<comments>http://www.ugotrade.com/2009/08/03/augmented-reality-bigger-than-the-web-second-interview-with-robert-rice-from-neogence-enterprises/#comments</comments>
		<pubDate>Mon, 03 Aug 2009 23:24:12 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Android]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Energy Saving]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[Metaverse]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[MMOGs]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[online privacy]]></category>
		<category><![CDATA[open metaverse]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[privacy and online identity]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[virtual communities]]></category>
		<category><![CDATA[Virtual Realities]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[websquared]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[AR Platform for Platforms]]></category>
		<category><![CDATA[ARConsortium]]></category>
		<category><![CDATA[ARToolkit]]></category>
		<category><![CDATA[Augmented Reality Browsers]]></category>
		<category><![CDATA[augmented reality platforms]]></category>
		<category><![CDATA[augmented reality SDKs]]></category>
		<category><![CDATA[augmented reality toolsets]]></category>
		<category><![CDATA[Dr Chevalier]]></category>
		<category><![CDATA[Gavin Starks]]></category>
		<category><![CDATA[Google Wave]]></category>
		<category><![CDATA[Green Tech AR]]></category>
		<category><![CDATA[Imagination AR Engine]]></category>
		<category><![CDATA[iphone and augmented reality]]></category>
		<category><![CDATA[iphone augmented reality]]></category>
		<category><![CDATA[iphone Video API and augmented reality]]></category>
		<category><![CDATA[ISMAR 2009]]></category>
		<category><![CDATA[Layar]]></category>
		<category><![CDATA[Lumus]]></category>
		<category><![CDATA[markerless AR]]></category>
		<category><![CDATA[markers and Webcam AR]]></category>
		<category><![CDATA[Mobile AR]]></category>
		<category><![CDATA[MoMo]]></category>
		<category><![CDATA[nathan freitas]]></category>
		<category><![CDATA[Neogence Enterprises]]></category>
		<category><![CDATA[Ogmento]]></category>
		<category><![CDATA[Robert Rice]]></category>
		<category><![CDATA[Unifeye Augmented Reality]]></category>
		<category><![CDATA[wearable displays for augmented reality]]></category>
		<category><![CDATA[Web Squared]]></category>
		<category><![CDATA[Wikitude]]></category>
		<category><![CDATA[World as a Platform]]></category>
		<category><![CDATA[World Browsers]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=4184</guid>
		<description><![CDATA[I first started talking to Robert Rice, CEO of Neogence Enterprises, Chairman of the AR Consortium, in 2008.Â  Robert was already actively working on creating the worldâ€™s first global augmented reality network.Â  But it took a few months before what Robert had said to me about impending explosion ofÂ  augmented reality into our lives really [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/whowhowhere.jpg"><img class="alignnone size-medium wp-image-4186" title="Questions and Answers signpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/whowhowhere-300x199.jpg" alt="Questions and Answers signpost" width="300" height="199" /></a></p>
<p>I first started talking to <a href="http://www.curiousraven.com/about-me/" target="_blank">Robert Rice</a>, CEO of <a href="http://www.neogence.com/#/home" target="_blank">Neogence Enterprises</a>, Chairman of the <a href="http://docs.google.com/AR%20Consortium"><span>AR Consortium</span></a><span>, in 2008. Robert was already actively working on creating the world&#8217;s first global augmented reality network. But it took a few months before what Robert had said to me about the impending explosion of augmented reality into our lives really sunk in &#8211; &#8220;this is going to be much bigger than the Web</span>!&#8221; he extolled.</p>
<p>By January, 2009 I was convinced and I posted my first interview with Robert, <a href="http://www.ugotrade.com/2009/01/17/is-it-%E2%80%9Comg-finally%E2%80%9D-for-augmented-reality-interview-with-robert-rice/" target="_blank">&#8220;Is it OMG Finally for Augmented Reality?..&#8221;</a> As I mentioned in the intro, I had recently tried out <a href="http://www.wikitude.org/" target="_blank">Wikitude</a> and <a title="Nat Mobile Meets Social DeFreitas" href="http://openideals.com/" target="_blank">Nathan Freitas&#8217;s</a> grafitti app on the streets of New York City and I was impressed.Â  Now, 7 months later, Augmented Reality hasÂ  not disappointed and there is an explosion of new applications, and the arrival of some of first commercial and practical toolsets, SDKs, and APIs for aspiring developers.</p>
<p>For more on this see my previous post, <a title="Permanent Link to Augmented Realityâ€™s Growth is Exponential: Ogmento â€“ â€œReality Reinvented,â€ talking with Ori Inbar" rel="bookmark" href="../../2009/07/28/augmented-realitys-growth-is-exponential-ogmento-reality-reinvented-talking-with-ori-inbar/">Augmented Realityâ€™s Growth is Exponential: Ogmento â€“ â€œReality Reinvented,â€ talking with Ori Inbar,</a> which is an introduction to my series of interviews with the key players in augmented reality and founding members of the <a href="http://www.arconsortium.org/" target="_blank">ARConsortium</a> &#8211; <a href="http://www.int13.net/en/" target="_blank">Int13</a>, <a href="http://www.metaio.com/" target="_blank">Metaio</a>, <a href="http://www.mobilizy.com/" target="_blank">Mobilizy</a>, <a href="http://www.neogence.com/" target="_blank">Neogence Enterprises</a>, <a href="http://ogmento.com/">Ogmento</a>, <a href="http://www.sprxmobile.com/" target="_blank">SPRXmobile</a>, <a href="http://www.tonchidot.com/" target="_blank">Tonchidot</a>, and <a href="http://www.t-immersion.com/" target="_blank">Total Immersion</a>.</p>
<p>As I mentioned before<span>, </span><a href="http://www.sprxmobile.com/about-us/" target="_blank"><span>Maarten Lens-FitzGerald</span></a><span> of </span><a href="http://www.sprxmobile.com/" target="_blank"><span>SPRXmobile</span></a><span> told me the other day that my first </span><a href="http://docs.google.com/2009/01/17/is-it-%E2%80%9Comg-finally%E2%80%9D-for-augmented-reality-interview-with-robert-rice/" target="_blank"><span>Interview with Robert Rice</span></a><span>, in January of this year, was a key inspiration for SPRXmobile to get started on the development of </span><a href="http://layar.eu/" target="_blank"><span>Layar â€“ a Mobile Augmented Reality Browser</span></a><span>. Much more on Layar and </span><span>Wikitude</span><span> â€“ world browser in my upcoming interviews with </span><a href="http://www.sprxmobile.com/about-us/" target="_blank"><span>Maarten Lens-FitzGerald</span></a><span> and <a href="http://www.mamk.net/" target="_blank">Mark A. M. Kramer</a>, respectively</span>.</p>
<p>Recently, both Layar and Wikitude earned a mention in the white paper by Tim O&#8217;Reilly and John Battelle, <a href="http://www.web2summit.com/web2009/public/schedule/detail/10194" target="_blank">Web Squared: Web 2.0 Five Years On</a>. Web Squared is essential reading not only because it covers the underlying technological shifts of &#8220;Web Meets World,&#8221; which augmented reality is a vital part of;Â  but, crucially, Web Squared focuses on how there is a new opportunity for us all:</p>
<p><strong>&#8220;The new direction for the Web, its collision course with the physical world, opens enormous new possibilities for business, and enormous new possibilities to make a difference on the worldâ€™s most pressing problems.&#8221;</strong></p>
<p>I am currently working on a post on Green Tech AR which is one of the areas augmented reality can play an important role &#8220;in solving the world&#8217;s most pressing problems.&#8221; Augmented Reality has a lot to offer Green Tech development.Â  As <a href="http://twitter.com/AgentGav" target="_blank">Gavin Starks</a> of <a href="http://www.amee.com/" target="_blank">AMEE</a> said at <a href="http://wiki.oreillynet.com/eurofoo06/index.cgi" target="_blank">Euro Foo in 2006</a>, &#8220;climate change would be much easier to solve if you could see CO2.&#8221;</p>
<p>But really useful Green Tech AR requires still hard to do markerless object recognition (going beyond feature tracking and modified marker recognition), and a tight alignment of media/graphics with physical objects, in addition to a quite a high level of instrumentation of the physical world.Â  And for Green Tech AR to really shine, we are going to need innovators like Robert Rice who are working on, and solving, multiple really hard problems like:</p>
<p><strong> &#8220;</strong><strong>privacy, media persistence, spam, creating UI conventions, security, tagging and annotation standards, contextual search, intelligent agents, seamless integration and access of external sensors or data sources, telecom fragmentation, privilege and trust systems, and a variety of others</strong><strong>.&#8221;</strong></p>
<p>Recently Robert Rice <a id="ph56" title="presented" href="http://www.mobilemonday.nl/talks/robert-rice-augmented-reality/" target="_blank"><span>presented</span></a><span> at </span><a href="http://www.mobilemonday.nl/talks/robert-rice-augmented-reality/" target="_blank"><span>MoMo</span></a><span> Amsterdam. </span> Here is a drawing of him in action (<a href="http://www.flickr.com/photos/wilgengebroed/3591060729/" target="_blank">picture below</a> from <a title="Link to wilgengebroed's photostream" rel="dc:creator cc:attributionURL" href="http://www.flickr.com/photos/wilgengebroed/"><strong>wilgengebroed</strong></a>&#8216;s Flickr Stream).</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/RobertRiceMoMOdrawing.jpg"><img class="alignnone size-medium wp-image-4185" title="RobertRiceMoMOdrawing" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/RobertRiceMoMOdrawing-300x184.jpg" alt="RobertRiceMoMOdrawing" width="300" height="184" /></a></p>
<p>In his Twitter feed Robert Rice ( <a href="http://twitter.com/robertrice" target="_blank">@RobertRice</a> ) Robert reminds us: &#8220;<span><span>By the way folks, what you see out there now as &#8220;augmented reality&#8221; is not what it is going to be in two years.&#8221;Â Â  Robert plans to show the first public demo of his &#8220;platform for platforms&#8221; atÂ  <a href="http://gamesalfresco.com/ismar-2009/ismar-08/" target="_blank">ISMAR 2009</a>. </span></span></p>
<p>Robert is writing up a series of White Papers currently. I got a preview of the first, &#8220;The Future of Mobile &#8211; Ubiquitous Computing and Augmented Reality.&#8221; Robert points out, <strong>&#8220;AR through the lens of the mobile industry and ubiquitous computing is almost overwhelming compared to AR as marker based marketing campaign.&#8221;</strong></p>
<p>I asked Robert, &#8220;What are the key take-aways for investors interested in the augmented reality field at the moment:</p>
<p><strong><span>&#8220;First, Mobile AR is going to be bigger than the web. Second, it is going to affect nearly every industry and aspect of life. Third, the emerging sector needs aggressive investment with long term returns. Get rich quick start ups in this space will blow through money and ultimately fail. We need smart VCs to jump in now and do it right. Fourth, AR has the potential to create a few hundred thousand jobs and entirely new professions. You want to kick start the economy or relive the golden days of 1990s innovation? Mobile AR is it.</span></strong></p>
<p><strong><span> Don&#8217;t be misguided by the gimmicky marketing applications now. Look ahead, and pay attention to what the visionaries are talking about right now. Find the right idea, help build the team, fund them, and then sit back and watch the world change. Also, AR has long term implications for smart cities, green tech, education, entertainment, and global industry. This is serious business, but it has to be done right. I&#8217;m more than happy to talk to any venture capitalist, angel investor, or company executive that wants to get a handle on what is out there, what is coming, and what the potential is. Understanding these is the first step to leveraging them for a competitive edge and building a new industry. Lastly, AR is not the same as last decade&#8217;s VR.&#8221;</span></strong></p>
<p><strong><span><br />
</span></strong></p>
<h3>Talking with Robert Rice</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/RobertRicepic.jpg"><img class="alignnone size-medium wp-image-4195" title="RobertRicepic" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/08/RobertRicepic-201x300.jpg" alt="RobertRicepic" width="201" height="300" /></a></p>
<p><em><a href="http://www.flickr.com/photos/vannispen/3586765514/in/set-72157619022379089/" target="_blank">Picture of Robert Rice</a> at <a href="http://www.mobilemonday.nl/talks/robert-rice-augmented-reality/" target="_blank"><span>MoMo</span></a> from <a href="http://www.flickr.com/photos/vannispen/"><strong>Guido van Nispen</strong></a>&#8216;s Flickr Stream</em></p>
<p><strong>Tish Shute:</strong> So perhaps we better start with an update on state of play with Neogence?</p>
<p><strong>Robert Rice:</strong> Neogence is doing well actually. We don&#8217;t talk much about the fact that we are still a small startup and we face a lot of the usual obstacles related to that and being a small team. Fundraising has been extra difficult, mostly because people are just now beginning to see the potential in AR, but that is still colored by perceptions based on a lot of the gimmicky AR ad campaigns out there. Still, it is better than it was two years ago when the idea of an AR startup was a bit of a joke to a lot of VCs we talked to. However, we do have an agreement from a new venture fund in Europe (which we can&#8217;t talk about yet) for our first round of funding, but we don&#8217;t expect to close that for several months.</p>
<p>If all goes well, we hope to debut our first public demo at ISMAR 2009 in Orlando to select individuals and a few press folks. We might release a few viral videos before then that are conceptual and about what we are building in the long run, <span>but that depends on how things go over the next several weeks</span>.</p>
<p>We are also very active in looking for and building strategic partnerships and relationships with other companies, and this is not restricted to the augmented reality or mobile sector. As I have said before, we are looking at this as a long term business venture and the industry as something that will be bigger than the web itself within ten years. We are doing typical contract work and custom AR solutions to keep the cash flow going and build up the corporate resume a bit. So, if you want something done, and better than the stuff you are seeing now with all of the generic &#8220;look at our brand in AR with markers and a webcam&#8221; you should definitely give us a call.</p>
<p style="margin-left: 0pt; margin-right: 0pt;"><strong>Tish Shute:</strong> Just to clarify because most of the recent press has been about browser type AR like Wikitude and Layar which are not in the purist sense AR &#8216;cos they do not have graphics tightly linked to physical world. Neogence, if I am correct, is focused on building a true AR platform in the sense I just described?</p>
<p><strong>Robert Rice: </strong>Hrm, I<span> </span><span> have argued with a few others about the actual definition of AR. Some</span> people prefer a narrow and limiting view (3D overlaid on video), but I think in terms of the market and the end-user, it is better to have a wider definition. In that sense, AR is purely the blend of real and virtual, with or without full 3D overlaid on video. If we go with that, then Wikitude, Layar, Sekai, NRU, and others all fit into the AR definition.</p>
<p>Anyway, you are correct. We are building a true <span>platform for AR, and this is quite different from what others are marketing as AR browser â€œplatforms.â€</span></p>
<p><span>There are a few problems with the â€œAR Browsersâ€ approach that no one seems to be noticing. </span>One is that they are all trying to get people to build new applications for their browsers, when they should be trying to get people to create content that they can share and browse.</p>
<p>Second, someone using Layar is not going to see anything that is designed for Sekai or Wikitude.</p>
<p>Third the experiences are generally for one user. While I love all of these guys and think each of the teams has some real talent on it, the model is flawed until someone using Wikitude can see the same thing that someone using Layar or Sekai camera is seeing (provided they are in the same physical location).</p>
<p><span>While we are working on our own client side technologies that we hope will be useful and integrated with every mobile device and AR browser out there, our core focus is on connecting everything and everyone together, and facilitating the growth of the industry with the tools to create content, applications, and so forth. We want to solve the really difficult technical problems (some of which most people havenâ€™t even considered yet, because of the perspective they are looking at the potential of AR with), and make it easy for everyone else to do the cool stuff. We want to be the facilitators.</span></p>
<p>If you really want an idea of where we are going or some of what has inspired us, you have GOT to read Dream Park, Rainbows End, and The Diamond Age. If you have heard me speak anywhere or read my blog, you know that I am continually suggesting these and others.</p>
<p>Anyway, short answer, yes, we are building a true <span>platform for </span><span>ubiquitous mobile augmented reality, and we are absolutely the first to be doing so</span>.<span> I hope to demo some of this in October at ISMAR, with a full commercial launch next year (10/10/10 at 1010am Hehe, seriously). We will probably launch a website soon for people to start signing up and building a community now (especially if you want in on the beta testing of the whole kibosh).</span></p>
<p><strong>Tish:</strong> So just to clarify, how will Neogence&#8217;s approach differ and fit into the growing world of Augmented Reality tools that we have now, e.g., <a href="http://www.hitl.washington.edu/artoolkit/" target="_blank">ARToolkit</a>, <a href="http://www.imagination.at/en/?Projects:Scientific_Projects:MARQ_-_Mobile_Augmented_Reality_Quest" target="_blank">Imagination</a>, <a href="http://www.metaio.com/products/" target="_blank">Unifeye</a>?</p>
<p><strong>Robert:</strong> I guess you could say that we are trying to build the infrastructure for the global augmented reality network. This could be viewed as a service, or even a platform for platforms. If Neogence does its job right, anything you create using ARtoolkit, Unifeye, or Imagination would be applications you could <span>ultimately link to, integrate with, or deploy on or through</span>, what we are building, and not be tied to a specific set of hardware, browser, or walled garden.</p>
<p><strong>Tish: </strong><span>You mention Neogence is going to provide a platform for platforms. Without knowing the details that sounds like a lot of centralization which prompts the inevitable question: &#8220;Who owns the data?&#8221; Do you think other AR applications or provid</span>ers would resist a &#8220;Platform for Platforms?&#8221; I know the potential centralization power of Google Wave has already got people talking about these issues (one of the comments in my recent blog post was about how Google Wave protocol may be interesting for at least some parts of augmented reality communication).</p>
<p><strong>Robert:</strong> It really depends on perception and how we end up <span>building it. We aren&#8217;t talking about creating a closed system. As far as who owns the data, it depends on what data we are talking about. For the most part, I think that if the end-user creates something, they should own it and have control over it. They should also be able to do what they want with it, independent of everything else. </span></p>
<p><span>This is one thing that proponents of the smart cloud and the thin/dumb client don&#8217;t like to talk about. It sounds great on paper, but when you start thinking about it, all that does is strip away power from the end user. Case in point&#8230;Amazon recently wiped every copy of George Orwell&#8217;s 1984 from all Kindle devices. They claimed they didn&#8217;t have rights to distribute/publish it and it was available by accident. The scary thing though, is that they literally went into every Kindle out there, found copies, and deleted them.</span></p>
<p><span> How would you like it if Microsoft suddenly decided to delete every copy of Microsoft Office? Or every file that had a .doc extension? That is a huge violation&#8230;we feel like we own what is on our computers. But with the whole cloud thing, your data is at the mercy of whoever is running the cloud servers. No privacy, no ownership, no control. And if the system breaks, all you will have is a pretty dumb device that can&#8217;t do much on its own. Now, that isn&#8217;t to say that the technical merits and benefits of a cloud model aren&#8217;t worth pursuing, they are.</span></p>
<p><span> But I think there needs to be some hybrid model. Don&#8217;t dumb down my computer or my smart phone, let&#8217;s keep pushing how much these devices can do. We should take full advantage of centralized and distributed systems, but in a hybrid mashup sense. That is what we are pursuing with our AR platform, while trying to protect ownership and intellectual property rights of the end user.</span></p>
<p><strong>Tish: </strong>Earlier today I was telling you how impressed I was by Google Wave &#8211; it is quite mind blowing to experience massively multiplayer real time interaction on what will be an open internet wide platform &#8211; Wave is breaking new ground here and more than one person has mentioned its potential role in AR to me (see <a href="http://www.ugotrade.com/2009/07/28/augmented-realitys-growth-is-exponential-ogmento-reality-reinvented-talking-with-ori-inbar/" target="_blank">the comments to my recent post on Ogmento</a>).</p>
<p>I know you are a strong advocate of this kind of real time shared experience being part of AR.Â  But we are only just beginning to see it emerge via Wave on the existing web &#8211; what will it take to have this kind of real time shared experience in AR!Â  We got briefly into the thick client, thin client, cloud versus P2P discussions &#8211; what is your approach to delivering a massively shared real time experience that is like Wave not confined to a walled garden?</p>
<p><strong>Robert:</strong> I&#8217;<span>m not a fan of any of those models as being stand alone or mutually exclusive. Again, the hybrid model with the best of both worlds is key. In the early stages of the emerging industry, you are likely to see some walled gardens (or perhaps a walled garden of walled gardens&#8230;). </span></p>
<p><span>No one knows how things are going to turn out in the next five to ten years and few people are thinking about it actively. For us though, I favor Alan Kay&#8217;s quote (pardon the paraphrasing): &#8220;To accurately predict the future, invent it&#8221;. That&#8217;s what we are doing. In the short term, there will be plenty of experimentation in the industry and a lot of model testing.</span></p>
<p><strong>Tish: </strong>Do you think though Wave protocols might be useful as at least part of the picture for AR standards?Â  As you point out open standards and open protocols are going to be vital for shared experiences of AR.Â  Is it important to build off existing protocols to get the ball rolling and what do you see as being the important early protocols for AR?</p>
<p><strong>Robert:</strong> I think for now, we will use a lot of existing protocols for communications and whatnot, as well as the usual standards for things like 3D models, animation, and so forth. This is only natural. However, as the industry and technology evolves, we will need entirely new ones. As far as I know there is no existing market standard for anything like the Holographic Doctor from Star Trek Voyager, and that type of thing is definitely in the pipeline for the future (sooner than you would think).</p>
<p><strong>Tish:</strong> All the excitement at the arrival of the browser like mobile reality developments has been really great &#8211; I feel people are getting a taste for what it means to compute with anyone/anything, anywhere and anytime.</p>
<p>Wikitude started the ball rolling. And with Wikitude.me it is the first to support user generated content. Now there is Layar, Sekai Camera also. But as you mentioned to me in an earlier chat, with Layar and Wikitude opening up &#8220;there are probably a half dozen other apps coming out in short order with similar functionality (even the AR twitter thing has some similarities).&#8221;</p>
<p>What has been most exciting to you about these developments up to this point? What will these apps/platforms need to do to stand out in a crowd.Â  Up to now, these browser like AR experiences do nothing with close by objects. Do you see &#8220;world browsers&#8221; with near object recognition coming out in the near future. Could Wikitude do this with an integration of SRengine or Imagination?</p>
<p><strong>Robert:</strong> Yes, Wikitude<span> or Layar could do this (integrate with something else for &#8220;near&#8221; AR) and it would be a step in the right direction. Tagging things in the real world is the basic functionality that will grow from text tags to photos, videos, 3D objects, and all sorts of other types of data and meta data. This gets really fun when that data is generated by the object itself. First is just giving people the ability to tag something and share that tag with their friends, everything else grows from that. This sort of functionality is probably the most exciting in terms of near future advancement.</span></p>
<p><span>However, I think the idea of a stand-alone</span> browser platform is a bit awkward&#8230;unless you also consider firefox a website browser platform. After all, you can create widgets (applications) for it. Anyway, the point is having access to the same data&#8230;if you put three people in a room, one for each browser, they should see and experience the same content, although the interface might be different (based on what browser and of course which hardware they are using). This means there needs to be some communication between whatever servers they are storing their data on (meaning, user tags) and some standard for how those tags are created.</p>
<p>Of course, if all they are doing is grabbing the GPS coordinates of the nearest subway station and telling you how far it is and in what direction, then they should all be able to see the same thing, regardless of the platform. But then, that isn&#8217;t really interesting is it? I could get the same info on a laptop with google maps.</p>
<p>This is part of the problem right now though&#8230;no one seems to be thinking about the bigger picture much. All of the effort is either on making the next cool ad campaign for a car or a movie, or creating a tool to tell you where the nearest thingamajig is, but in a really cool fashion on a mobile device.</p>
<p>No one is talking much about filtering data, privilege systems, standards, third party tools, interoperability, and so on. There is also little conversation about where hardware is going. Right now everyone is developing software based on what hardware is available. This needs to change where hardware is being developed to take advantage of new software coming out (this happened in the PC industry a while back and growth accelerated dramatically).</p>
<p>These are some of the reasons why I led the effort to start the AR Consortium. We brought CEOs from 8 different AR companies and startups together to start talking about these issues. We are still getting organized and have plans to expand the membership to other companies, but we want to do this right and we aren&#8217;t rushing things. The important thing is that we have started and there is at least a line of communication open now, where there wasn&#8217;t before.</p>
<p>I would expect to see the early movers expanding what they offer very soon, and they will probably lead the way in the short term. Definitely keep an eye on the companies involved in the AR Consortium. There are lots of very smart and motivated people there, and they are far ahead of all the experimental dabbling in AR we are beginning to see on youtube, twitter, and elsewhere.</p>
<p><strong>Tish: </strong>When we had a discussion about what were the basics for an AR platform and an AR browser earlier, you talked about the difference between tools, a platform, and an AR browser &#8211; like Wikitude and Layar which should be about  features/functionality e.g. to create treasure hunts AR geocaching, invisible AR yellow sticky notes you can leave at restaurants you don&#8217;t like, etc. Also you noted it should let you explore (browse) multiple formats, and open content for AR &#8211; any data, information, or media that is linked to something in the real world and the visualization/interaction with the same.</p>
<p>Wikitude<span> is a stepping stone to a true browser by your definition. But are we also seeing what you would define as an AR platform emerging &#8211; Unifeye, Wikitude (you can recap your definition if you like too)?</span></p>
<p>I think Wikitude hopes to provide the lego blocks forÂ  augmented reality readers, browsers, applications, tools, andÂ  platforms?</p>
<p><strong>Robert:</strong> I expect some segmentation among the various AR companies that are out now, as they find their individual strengths and focus on them. Some will emphasize the client software (the browser), others will develop robust tools for creating content, SDKs/APIs will advance and facilitate rapid development of applications, etc. Neogence is ultimately working on the glue in the middle that ties everything together, makes it massively multiuser, persistent, and ubiquitous. Things like Unity3D have the potential to fill a need in the middleware space.</p>
<p><strong>Tish:</strong> I know <a href="http://www.ugotrade.com/2009/06/12/mobile-augmented-reality-and-mirror-worlds-talking-with-blair-macintyre/" target="_blank">Blair McIntyre</a> (see my interview with Blair here) and others are using Unity3D as an AR client, Could Unity3D become increasingly important?</p>
<p><strong>Robert:</strong> It has the potential to become a favored middleware for providing the rendering layer. It already works nicely in regular browsers, and on several mobile platforms. Why code all the graphics rendering stuff from scratch when you can just license something and extend its features with AR functionality?</p>
<p><strong>Tish:</strong> Now to ask your own question back to you! There seems to be a lot of reason to think that, eventually, there will be the kind of access to the iphone video API that augmented reality really requires and by that I mean more than we will get with OS 3.1 which is rumored to deliver only about half of what we really need for AR on the iphone &#8211; &#8220;not truly useful when you want to align video. with graphics.&#8221;Â  So:</p>
<p><em>&#8220;The iphone&#8230;future or failure? Seemingly anti-developer stance regarding augmented reality, and only a sliver of the global market share. Are we letting the short term glitz of Apple and the iPhone fad pull us in the wrong direction? Shouldnt we be focusing on symbian devices that have the lion&#8217;s share of the market? or should we be looking more at either other OSs (winmobile, android) or not at all and trying to create a new platform that is more MID and less smart phone with a hardware partner?&#8221;</em></p>
<p><strong>Robert:</strong> Apple and the iphone are a bit problematic right now. There is no way I can go to a venture capitalist (at least in North America) and say hey we are building awesome AR applications for winmobile or symbian&#8230;they would either laugh or they simply wouldn&#8217;t get it. There is this false perception that the iphone is the ultimate mobile device, it is the sexiest, and the only thing that people want. Everyone wants a demo on the iphone, the media is mostly interested in iphone developments, and the apple fanatic market could give a fig about other devices. Other devices may have a larger market share or even better hardware, but we have to focus on the iphone right now at least in the demo stage to get any market attention and traction worth the time and effort.</p>
<p>In the future though, unless Apple changes its stance with their SDK and APIs, and starts adding hardware that is key for mobile AR (beyond what is there now), the market will move on without them. <span>This is a really easy decision to make given Apple&#8217;s draconian policies and the fact that their percentage of the global market is miniscule. The smart companies are looking at the whole picture and not putting all of their eggs in the Apple basket.</span></p>
<p>Of course, once the wearable displays are commercially viable everything changes. Wearable computers with small screens or even no screens are going to be what everyone wants. The interface will go from handheld touch screens to virtual holographic interfaces that you interact with using your bare hands.</p>
<p>So for now, <span>(the immediate short term), </span>its all about the iphone. Taking mobile ubiquitous AR to the global market and building for the future will be based on something else. Hardware risks becoming a commodity or a closed platform. Do you really want to buy the Apple iGlasses and only see AR content that is compatible, where your best friend has a pair of WinGlasses and sees something entirely different? No. The hardware, and the client software (what people are calling the ar browser now) will become common and it won&#8217;t matter what brand you use, they will all be accessing the same content.</p>
<p>But at least for the foreseeable future, we are building software for specific hardware, and the sexiest mobile on the block is the iphone. The second someone comes out with something much better and the paradigm shifts (software driving hardware instead of vice versa) everything changes.</p>
<p><strong>Tish:</strong> How is the quest for sexy AR eyewear going.Â  I know we were checking out <a href="http://www.masunaga1905.jp/brand/teleglass/" target="_blank">the Japanese eyewear</a> with Adam Johnson from <a href="http://genkii.com/" target="_blank">Genkii</a> just now.Â  For the Neogence project &#8211; as you are going for a fully developed model of AR doesn&#8217;t this necessitate going beyond the iphone and getting the hardware companies moving on the eyewear?</p>
<p><strong>Robert:</strong> The guys making wearable displays really need to get off the pot and stop paying lip service to mobile AR. If they don&#8217;t do something quick, I,Â <span> and others, are</span> going to be scouring the planet looking for someone capable of building the lightweight stylish wearable displays with transparent lenses we are begging for. We aren&#8217;t going to be waiting around for hardware anymore. The AR Pandora&#8217;s box has been opened. I should note that many of us (AR Consortium members) have had less than pleasant experiences or communications with the half dozen companies or so that are making wearable displays. Either their visual design is terrible, the materials feel flimsy, the field of view is limited, or the companies are preoccupied with other business and government contracts. Any attention to the growing AR market is an afterthought and in a few cases condescending. AR is going to be a billion dollar industry in a very short time, and these guys are just leaving money on the table. If they were smart, they would be begging the CEOs from the AR Consortium to fly out to their offices and collaborate on building a pair of wicked sick glasses. The smart phone manufacturers should be doing the same thing, but I have to say that they at least seem to have some ambition and zeal to create better devices, so I can&#8217;t really complain too much there.</p>
<p>Anyway, to answer the rest of your question, we have to assume that the hardware guys, especially regarding the eyewear, is going to take a long time to develop and release the things we need for the ultimate AR experience. So, our goal is to start building things now for what is available. That means scaling things down and handicapping what AR can do, so it works on the &#8220;sexy&#8221; iphone. The important thing though is to start creating applications -now- so when the glasses are commercially available, there will be a wealth of content for people to access and use on day one.</p>
<p>As long as Apple isn&#8217;t playing nice,<span> </span>it is going to hurt everyone. <span>Is it any surprise that they shut down Google Voice? </span> There is a huge opportunity for someone to step up and leapfrog the rest of the industry. Give us the hardware and we will create amazing software for it. Don&#8217;t compete with the iphone, surpass it.</p>
<p><strong>Tish: </strong>What is the state of play of current AR technology and toolkits?</p>
<p><strong>Robert:</strong> The current crop of AR technology and toolkits is absolutely critical for this stage of the industry, and everyone should be leveraging it as much as possible. I talk down marker and image based tracking a lot, but I also like to point out that it is the necessary baseline that the industry is going to be built on. The problem is that there is only so much you can do with marker driven apps, and as creative people and marketing types start conceptualizing about all sorts of cool stuff for the future, they risk setting the expectations too high. It is one thing to show someone the future, it is another to say this is the future and it&#8217;s happening right now. This is why I cringe every time I see a conceptual video presented as &#8220;our product DOES this&#8221; instead of &#8220;our product WILL DO this.&#8221; <span>Something that simple can still cause the butterfly effect of raising expectations too high and contribute to overhyping.</span></p>
<p><strong>Tish: </strong>One of the things that seems very exciting about the new <a href="http://ogmento.com/" target="_blank">Ogmento</a> partnership is that experienced content producersÂ  <a id="squu" title="Brad Foxhoven" href="http://www.blockade.com.nyud.net:8080/about/about-blockade" target="_blank">Brad Foxhoven</a> and <a id="odvk" title="Brian Seizer" href="http://brianselzer.com/">Brian Selzer</a> from <a id="xow_" title="Blockade" href="http://www.blockade.com/" target="_blank">Blockade</a> are now taking a leading role in AR.Â  What are the most exciting directions for content that you see emerging for AR in the next 12 months?</p>
<p><strong>Robert:</strong> Virtual (well, augmented) pets, and multiuser mobile AR games (2-4 people) are probably going to lead in the next 12 months for content. Easy, accessible, engaging.</p>
<p><strong>Tish: </strong>And are you at Neogence also involved in content partnerships?</p>
<p><strong>Robert:</strong> Yes, we are in the process of finalizing some content partnerships with an eye for long term relationships. We are specifically looking for partners that want to find substantive ways to leverage AR technology, and not use it as a superficial gimmick or attraction that wears off after five minutes. I&#8217;m still cringing over the Procter &amp; Gamble Always campaign with AR.</p>
<p><strong>Tish:</strong> So back to your observation about some of the tricky problems re creating a true global massively multiuser, ubiquitous, mobile AR platform &#8211; what are some of the main obstacles to this mission in our view? (aside from getting investment!)</p>
<p><strong>Robert:</strong> Trying to explain it to people. The technical problems we can handle or have already solved. But trying to communicate what exactly we are doing is still tough. Not because it is overly complicated, but rather because it is so new and different. People are having a hard time grasping augmented reality beyond marker/webcam.</p>
<p><strong>Tish: </strong>Which AR tools are most important right now?</p>
<p><strong>Robert:</strong> Content is critical right now to show what the technology is capable of and to continue building the presence of augmented reality in the public mind the big benefit to integrated / unified platforms now is speed of development for content. I think that the flash artoolkit = papervision is rocking the planet right now. It is accessible, easy to learn, and lets people create something very quickly. More tools and middleware are coming out and this increases options for designers and developers.</p>
<p><strong>Tish: </strong>What are your favorite papervision apps?</p>
<p><strong>Robert: </strong>Hrm, I don&#8217;t have a favorite papervision app just yet, although I think the tech is solid. I expect to see a lot of stuff built on that platform in the near future. Especially as more ad agencies get on the bandwagon and start telling their IT guys to learn how to program flash so they can make something. Have you seen www.ronaldchevalier.com Not so much for the actual AR stuff, but because the whole thing is just brilliant. Its exactly like some cult figure spiritual guru would do with AR. I wish I had thought of it first actually. This is probably one of the best -seamless- implementations of AR in marketing where it fits&#8230;it isn&#8217;t just jammed in there for the sake of saying they used AR.</p>
<p><strong>Tish:</strong> Do you think Apple is going open the iphone to the full potential of augmented reality anytime soon &#8211; a lot of expectations have been raised?</p>
<p><strong>Robert:</strong> Apple is like that guy who has a party at his house and owns this really awesome state of the art home theater in his basement, but makes everyone watch a movie in the living room on a regular TV with a VCR.</p>
<p>They need to get over themselves and quit being a wet blanket. Otherwise, we are taking the beer and pizza we brought, and going to someone else&#8217;s house. <span>Sorry, the Apple thing is a bit of a sore point with me.</span></p>
<p><strong>Tish:</strong> But will people leave all that candy and soda at the appstore?</p>
<p><strong>Robert:</strong> I tell you what though, there is an opportunity for certain mobile phone manufacturers to give me a call and start talking to Neogence and the other members of the Consortium. We have some ideas and specs that could have a radical impact on the mobile market and stuff the IPhone in a box. Hint hint.</p>
<p><strong>Tish:</strong> So what is your vision for the ARconsortium.Â  I know it kicked off with a letter to Apple about the video API.Â  What is the next step? There was a lot of hope that this year would be big for MIDs but this really hasn&#8217;t happened yet &#8211; do you think there is hope for a MID take off despite the lousy economy?)</p>
<p><strong>Robert: </strong>MIDs? No, not yet. smart phones are too lucrative and too hot. It isn&#8217;t time yet for the MID to go mainstream. For that to happen, there needs to be a driving need (cough ubiquitous AR cough)</p>
<p>The AR consortium is mostly an informal affiliation. I expect that representatives from each member will probably meet at every significant conference to catch up over drinks. We are also going to be planning for our own members conference at least once a year. That will happen after we expand the membership though.</p>
<p>The main idea behind the consortium though was to open up a channel of communication between the CEOs so we could work together on standards, solving problems, collaborating, forming some partnerships, and using the collective to bang on the doors of companies like Apple and others. There is power in a group.</p>
<p><strong>Tish:</strong> You mentioned there is a whole long conversation we can have about getting the eyewear.Â  As you point out true AR eyewear changes everything.Â  Can give a little road map of where this has to go?</p>
<p><strong>Robert: </strong>There are essentially four or five main approaches, depending on whether or not you make the lenses special or if they are just plain. You would normally want them to be plain so people with prescription lenses wouldn&#8217;t have problems and would have the option to switch them out. Some types use a more prismatic approach for top down projection, or a corner piece mounts lasers and bounces them off the lens into the eye.Â  Another approach is embedding OLEDs or something else into the lenses themselves.</p>
<p>I really like the <a href="http://www.lumus-optical.com/" target="_blank">Lumus</a> approach, but their product design isn&#8217;t quite there yet. If the wearables don&#8217;t look cool, people won&#8217;t use them. To be honest, if I had the money, I&#8217;d probably ask the Art Lebedev guys to design them based on someone else&#8217;s optical engineering. They designed the <a href="http://www.artlebedev.com/everything/optimus/" target="_blank">optimus maximus</a> old keyboard&#8230;Â Â  brilliant industrial designers, loaded with engineers too. If these guys couldn&#8217;t build the glasses and make them look damn bad ass, I&#8217;d be shocked. Heck, I bet they could build the next gen MID while they were at it.</p>
<p><strong>Tish: </strong>Getting the hardware innovation and software innovation feeding into each other would be really great.</p>
<p><strong>Robert</strong>: Absolutely.</p>
<p><strong>Tish</strong>: That would push the eyewear forward too wouldn&#8217;t it?</p>
<p><strong>Robert:</strong> All it takes is one, and then the competitive landscape would fire right up.</p>
<p><strong>Tish:</strong> What applications would the accurate gps enable?</p>
<p><strong>Robert:</strong> Everything. for example, you know exactly where the phone is and where it is facing, that means you can put it on a table and hit a button, then move it somewhere else and do the same thing in a few minutes, you have a nearly accurate &#8220;mental&#8221; model of the whole place now you go back and start dropping virtual flower pots everywhere.</p>
<p>This is one area where I think the smart phone guys are missing the boat and taking the cheap route. It is possible to have very accurate GPS (down to a six inch area) with better chips and firmware, but it is cheaper to stick in old tech. Most apps today dont need that hyper accuracy, so they aren&#8217;t bothering. Mobile AR though, thats a different story.</p>
<p>With that level of accuracy, you would know exactly where the mobile device is, so all you would need to know is the direction it is facing (orientation), and you could solve one of the problems with registering exactly where 3D objects and augmented media is (it is more complicated than I am describing it, but we don&#8217;t need to get into that much detail here). You wouldn&#8217;t need markers anymore.</p>
<p><strong>Tish: </strong> Isn&#8217;t Wikitude doing this with Wikitude.me their tagging app.?</p>
<p><strong>Robert:</strong> Not really. That type of approach is on a very large scale using the accelerometers compass and GPS to determine where you are and what is in the distance. They (and others like Layar) don&#8217;t handle &#8220;near&#8221; AR. They effectively poll your GPS and then check a database to see what is nearby and what degree/distance it is and then they draw a representation on the screen. They don&#8217;t even need a mobile device&#8217;s camera at all.</p>
<p>Even if they did things up close, its still based on finding landmarks or on things that are broadcasting their location. For example, if they were standing near me, they might get &#8220;robert, 37 degrees, 15 meters away&#8221; but they wouldn&#8217;t be tracking me exactly as I walk around or have the ability to overlay graphics on ME.</p>
<p><strong>Tish:</strong> I retweeted your <a title="#ar" href="http://twitter.com/search?q=%23ar">#ar</a> marketing using ARToolkit + flash (markers/webcams) = Photoshop pagecurl  &lt;six months. Bad design kills innovation. I know you like <a href="http://ronaldchevalier.com/" target="_blank">Dr Chevalier </a>though!Â  What are some of the other AR marketing projects that you like. What would you like to see in terms of innovation in the next 6 months?</p>
<p><strong>Robert:</strong> The marker/webcam approach is already becoming overused and cliche (tremendously fast). Older readers will remember the ubiquitous photoshop page curl that adorned nearly every website and graphic on the internet back in the day. It was horrible. Yes, the Dr. Chevalier stuff cracks me up.</p>
<p>I want to see some big companies or ad agencies really try to do something different with AR, preferably mobile. Take some risks, do something different. Don&#8217;t follow the crowd. Innovation? I want to see some wearable displays with transparent lenses, I want a mobile device specifically designed for ubiquitous AR, I want to see some experimenting with AR in the green tech sector, and I&#8217;d like to see someone get that GiFi wireless technology from that researcher in Australia and jam it into a smart mobile. I would also like my flying car and lunar vacation now, thank you. It is almost 2010 and no one has found that black obelisk yet.</p>
<p><strong>Tish:</strong> So a few closing thoughts! What do you see as the next big thing? Hopes for the AR Consortium? Biggest obstacle for commercial AR? And what is the coolest thing you have seen this year?!</p>
<p><strong>Robert:</strong> The next big thing is what I&#8217;m working on hahaha. I hope the AR Consortium will grow and be the active catalyst in making AR mainstream, practical, and world changing.</p>
<p>The biggest obstacle is making sure that the right funding finds the right developers to develop the right technology and create kick ass applications.</p>
<p>The coolest thing I&#8217;ve seen this year would probably be <a href="http://vimeo.com/5595869" target="_blank">the facade projection stuff</a> (see below): Now, imagine that, but without the projector. That&#8217;s part of what I envision for AR in the future.</p>
<p><object classid="clsid:d27cdb6e-ae6d-11cf-96b8-444553540000" width="400" height="225" codebase="http://download.macromedia.com/pub/shockwave/cabs/flash/swflash.cab#version=6,0,40,0"><param name="allowfullscreen" value="true" /><param name="allowscriptaccess" value="always" /><param name="src" value="http://vimeo.com/moogaloop.swf?clip_id=5595869&amp;server=vimeo.com&amp;show_title=1&amp;show_byline=1&amp;show_portrait=0&amp;color=&amp;fullscreen=1" /><embed type="application/x-shockwave-flash" width="400" height="225" src="http://vimeo.com/moogaloop.swf?clip_id=5595869&amp;server=vimeo.com&amp;show_title=1&amp;show_byline=1&amp;show_portrait=0&amp;color=&amp;fullscreen=1" allowscriptaccess="always" allowfullscreen="true"></embed></object></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/08/03/augmented-reality-bigger-than-the-web-second-interview-with-robert-rice-from-neogence-enterprises/feed/</wfw:commentRss>
		<slash:comments>20</slash:comments>
		</item>
		<item>
		<title>Composing Reality and Bringing Games into Life: Talking with Ori Inbar about Mobile Augmented Reality</title>
		<link>http://www.ugotrade.com/2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/</link>
		<comments>http://www.ugotrade.com/2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/#comments</comments>
		<pubDate>Wed, 06 May 2009 14:50:30 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[CurrentCost]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Energy Saving]]></category>
		<category><![CDATA[home automation]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[HomeCamp]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[Kids With Cameras]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[MMOGs]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[Mobile Technology]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[open source]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[social gaming]]></category>
		<category><![CDATA[social media]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Virtual Meters]]></category>
		<category><![CDATA[Virtual Realities]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[Adam Greenfield]]></category>
		<category><![CDATA[Add new tag]]></category>
		<category><![CDATA[alternate reality games]]></category>
		<category><![CDATA[alternative reality gaming]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[AR]]></category>
		<category><![CDATA[AR eyewear]]></category>
		<category><![CDATA[AR goggles]]></category>
		<category><![CDATA[ARToolkit]]></category>
		<category><![CDATA[augmented reality games]]></category>
		<category><![CDATA[augmented times]]></category>
		<category><![CDATA[Better Place]]></category>
		<category><![CDATA[Blair Macintyre]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[Caryatids]]></category>
		<category><![CDATA[Come Out and Play]]></category>
		<category><![CDATA[composing reality]]></category>
		<category><![CDATA[Cory Doctorow]]></category>
		<category><![CDATA[eyewear for augmented reality]]></category>
		<category><![CDATA[game development conference]]></category>
		<category><![CDATA[Games Alfresco]]></category>
		<category><![CDATA[games for preschoolers on the iphone]]></category>
		<category><![CDATA[games on the iphone]]></category>
		<category><![CDATA[GDC 2009]]></category>
		<category><![CDATA[GE augmented reality ad]]></category>
		<category><![CDATA[google earth]]></category>
		<category><![CDATA[green technology]]></category>
		<category><![CDATA[image recognition]]></category>
		<category><![CDATA[Immersive augmented reality]]></category>
		<category><![CDATA[Int 13]]></category>
		<category><![CDATA[iphone]]></category>
		<category><![CDATA[iphone games]]></category>
		<category><![CDATA[iPhone OS 3]]></category>
		<category><![CDATA[iphone versus the android]]></category>
		<category><![CDATA[ISMAR]]></category>
		<category><![CDATA[ISMAR 2009]]></category>
		<category><![CDATA[jane mcgonigal]]></category>
		<category><![CDATA[julian Bleeker]]></category>
		<category><![CDATA[Kati London]]></category>
		<category><![CDATA[Kweekies]]></category>
		<category><![CDATA[Loopt]]></category>
		<category><![CDATA[markerless AR]]></category>
		<category><![CDATA[markerless augmented reality]]></category>
		<category><![CDATA[Microsoft Tag]]></category>
		<category><![CDATA[mobile augmented reality]]></category>
		<category><![CDATA[mobile gaming]]></category>
		<category><![CDATA[Mobile Reality]]></category>
		<category><![CDATA[Netweaver]]></category>
		<category><![CDATA[open source augmented reality]]></category>
		<category><![CDATA[Ori Inbar]]></category>
		<category><![CDATA[Pookatak]]></category>
		<category><![CDATA[Pookatak Games]]></category>
		<category><![CDATA[reality experiences]]></category>
		<category><![CDATA[RFID]]></category>
		<category><![CDATA[Robert Rice]]></category>
		<category><![CDATA[Rouli Nir]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[Shai Agassi]]></category>
		<category><![CDATA[smart environments]]></category>
		<category><![CDATA[smart objects]]></category>
		<category><![CDATA[The End of Hardware]]></category>
		<category><![CDATA[the Pong for augmented reality]]></category>
		<category><![CDATA[the shape of alpha]]></category>
		<category><![CDATA[Tish Shute]]></category>
		<category><![CDATA[Tonchidot]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[ubiquitous augmented reality]]></category>
		<category><![CDATA[ubiquitous experience]]></category>
		<category><![CDATA[virtual reality]]></category>
		<category><![CDATA[WARM 09]]></category>
		<category><![CDATA[Wattzon]]></category>
		<category><![CDATA[Where 2.0]]></category>
		<category><![CDATA[WikiMouse]]></category>
		<category><![CDATA[Wikitude]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3448</guid>
		<description><![CDATA[Recently, I talked to Ori Inbar (above), formerly senior vice-president at SAP. Ori is on a mission to make augmented reality commercially successful not in 5, 10, or 15 years, but now. Ori is the founder of Pookatak Games &#8211; a video game company, &#8220;with a vision to upgrade the way people experience the [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/oriinbarpost.jpg"><img class="alignnone size-medium wp-image-3449" title="oriinbarpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/oriinbarpost-300x199.jpg" alt="oriinbarpost" width="300" height="199" /></a></p>
<p>Recently, I talked to <a href="http://gamesalfresco.com/">Ori Inbar</a> (above), formerly senior vice- president at <a href="http://www.sap.com/">SAP</a>.Â  Ori is on a mission to make augmented reality commercially successful not in 5, 10, or 15 years, but now. Ori is the founder of <a href="http://gamesalfresco.com/about/" target="_blank">Pookatak Games</a> &#8211; a video game company, <strong>&#8220;with a vision to upgrade the way people experience the world.&#8221;</strong> Ori will be participating May 20th, in<a href="http://en.oreilly.com/where2009/public/schedule/detail/7197" target="_blank"> O&#8217;Reilly&#8217;s Where 2.0 panel, &#8220;Mobile Reality</a>&#8221; -Â  an event not to be missed IMO.</p>
<p>The taste for computing anywhere anytime has entered human culture via the iphone and is spreading like chocolate cake and pizza at a preschool party (see <a href="http://gamesalfresco.com/2009/03/23/gdc-2009-why-the-iphone-just-changed-everything/" target="_self">why the iPhone changed everything</a>).Â  And while the full flowering of the next step is yet to come &#8211; computing anywhere, anytime by anyone and <strong>anything </strong><a href="http://en.wikipedia.org/wiki/Internet_of_Things" target="_blank">(&#8220;the internet of things&#8221;</a>), our love for these first devices capable of being <strong>mediating artifacts for ubiquitous computing</strong> (Adam Greenfield) is a vital first step to free us from our tethers to computer screens, and fulfill the promise of augmented reality.</p>
<p>If you need more convincing on the pivotal role augmented reality will play as the web moves into the world, check out Tim O&#8217;Reilly&#8217;s recent comments in <a id="iz1_" title="this video clip on Augmented Times" href="http://artimes.rouli.net/2009/04/tim-oreilly-on-recognition-rfid-and-web.html" target="_blank">this video clip posted on Augmented Times</a> and <a id="wtf4" title="here" href="http://radar.oreilly.com/2008/02/augmented-reality-a-practical.html" target="_blank">here</a> early last year.</p>
<p>From another perspective, the gloomy specter of economic and environmental catastropheÂ  is driving a movement to &#8220;<a id="h5pf" title="infuse intelligence into the way the world work's&quot;" href="http://news.bbc.co.uk/2/hi/technology/7992480.stm" target="_blank">infuse intelligence into the way the world work&#8217;s.&#8221;</a> But the challenge for a smart planet is not just about making environments smart, it is about using smart environments to enable people to act smarter (<a href="http://www.ugotrade.com/2009/02/27/towards-a-newer-urbanism-talking-cities-networks-and-publics-with-adam-greenfield/" target="_blank">see my interview with Adam Greenfield</a>).</p>
<p>We need a rapid upgrade in both the way the world works, and the way we experience the world.</p>
<p>((Note:Â  It is time to read (if you haven&#8217;t already) <a href="http://search.barnesandnoble.com/The-Caryatids/Bruce-Sterling/e/9780345460622" target="_blank">Bruce Sterling&#8217;s Caryatids</a> (<a href="book of the year for 2009" target="_blank">Cory Doctorow&#8217;s book of the year for 2009</a>) &#8220;as a software design manual&#8221; (<a href="http://www.nearfuturelaboratory.com/2009/03/17/design-fiction-a-short-essay-on-design-science-fact-and-fiction/" target="_blank">see Julian Bleeker</a>) because Caryatids reveals the Gordian knots of human folly, greed, compassion and desire entwined in near future designs for technologies to save the world.))</p>
<p>Ori Inbar, worked with Shai Agassi (Shai is now leading the world changing <a id="v5ow" title="Better Place" href="http://www.betterplace.com/" target="_blank">Better Place</a> ) driving <a id="gf_5" title="Netweaver" href="http://en.wikipedia.org/wiki/NetWeaver" target="_blank">Netweaver</a> from a mere concept to a &#8220;major, major business for SAP.&#8221; So Ori has already been through the cycle of working in a very small startup and growing it into a billion dollar business.Â  He has both the experience and the passion to realize his vision for augmented reality.</p>
<p>At Pookatak, he explains :</p>
<p><strong>&#8220;We design &#8220;reality experiences&#8221; that make users&#8217; immediate environments more significant to them. We wish to free young and old from getting lost in front of the screen. By delivering the world&#8217;s information to people&#8217;s field of view, and by weaving real world objects into interactive narratives, we help people rediscover the real world.&#8221;</strong></p>
<p>Pookatak will release their first game this summer. Currently it is under wraps. But Ori gives us some glimpses of what is to come in the interview below.</p>
<p>In addition to founding Pookatak, Ori is involved in a broader effort to move augmented reality forward. On his blog, <a id="ie5s" title="Games Alfresco" href="http://gamesalfresco.com/" target="_blank">Games Alfresco</a> &#8211; where he recently welcomed <a href="http://gamesalfresco.com/about/" target="_blank">a new partner, Rouli Nir</a> &#8211; Ori has focused his eye of wisdom on every significant recent advance in Augmented Reality (check out <a id="zr9y" title="this essence of Ori's thinking in a fast paced video" href="http://gamesalfresco.com/2009/03/09/augmented-reality-today-ori-inbar-speaks-at-warm-2009/" target="_blank">this essence of Ori&#8217;s thinking in a fast paced video</a> presentation for <a href="http://gamesalfresco.com/2009/02/12/live-from-warm-09-the-worlds-best-winter-augmented-reality-event/" target="_blank">WARM &#8217;09</a>).</p>
<p>Also Ori is one of the organizers of the interactive media track at <a id="b-c6" title="ISMAR 2009" href="http://www.ismar09.org/" target="_blank">ISMAR 2009</a>.Â  At ISMAR this year, Ori explained,<strong> &#8220;we are trying to bring in people that develop interactive experiences for consumers, beyond the traditional attendees coming from a research perspective.</strong>&#8221;</p>
<p>In the interview below, Ori explains much of his thinking on how augmented reality will become commercially successful.Â  Enjoy it, think about it, and share it. And most importantly, if you can, get involved with ISMAR 2009.</p>
<p>OriÂ  has inspired me to participate in <a id="seky" title="ISMAR" href="http://www.ismar09.org/" target="_blank">ISMAR</a> this year.Â  Ori pointed out:</p>
<p><strong>The </strong> <a href="http://campwww.informatik.tu-muenchen.de/ismar09/lib/exe/fetch.php?id=ismar09%253Astart&amp;cache=cache&amp;media=ismar09:ismar09-cfp_090211_final.pdf" target="_blank">call for papers</a> <strong>is on, and this year it targets well beyond the typical research papers audience and into interactive media and art folks. </strong></p>
<p><strong>There are plenty of opportunities such as:</strong></p>
<p><strong>Art Gallery</strong></p>
<p><strong>Demonstrations</strong></p>
<p><strong>Tutorial</strong></p>
<p><strong>Workshops</strong></p>
<p>It&#8217;s a huge opportunity to shape the emergence of augmented reality.<br />
<br /></p>
<h2><strong> Interview With Ori Inbar</strong></h2>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-41.png"><img class="alignnone size-full wp-image-3479" title="picture-41" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-41.png" alt="picture-41" width="107" height="146" /></a><br />
<h3>Making Augmented Reality Commercially Successful</h3>
<p><strong>Tish Shute: </strong>You are considered a key trail blazer in AR and you have the go to blog for augmented reality!Â  What are the most important lessons you have learned researching, writing, and developing AR in the last couple of years?</p>
<p><strong>Ori Inbar: You need to have a vision. You need to know where this is going to go in ten or fifteen or twenty years. But you&#8217;ve got to start with something really simple that makes use of the technology you have on hand. And do something that is practical, that people will like, and something they would actually want to buy. It&#8217;s as simple as that. I&#8217;m currently looking at what we could do with existing technology. First of all, you have to put it in front of people. Right now most people have never heard about the term augmented reality. Go into the street, and ask 100 people about it, maybe 2 would know about it. So you need to put it in front of people because most people think it&#8217;s still science fiction or a special effect you see in movies, not something you can experience in real life. </strong></p>
<p><strong>Tish: </strong>It seems to me to that for augmented reality applications to become popular with existing technology the key breakthrough would be getting people to hold up their phones. What are the obstacles to getting people to use their mobile devices like this?</p>
<p><strong>Ori: There&#8217;s a really nice cartoon by </strong><em> </em><strong><a href="http://www.tonchidot.com/">Tonchidot</a> (below) &#8211; the Japanese company behind the Sekai Camera. It&#8217;s an illustration showing the evolution of man, from ape to man (holding a cell phone looking down), to the developed man holding a device like a camera &#8211; in front of its eyes.</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-37.png"><img class="alignnone size-medium wp-image-3454" title="picture-37" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-37-300x221.png" alt="picture-37" width="300" height="221" /></a><strong></strong></p>
<p><strong>Which is exactly what you&#8217;re talking about. People ask, &#8220;are people going to walk with this like that all day long?&#8221; Probably not. I mean you have to build it in a way that doesn&#8217;t require them to hold it like that all the time. People are used to this gesture with the ubiquitous digital cameras. I tested one of my prototypes on a two and a half year old girl. She had no problem holding it just like she holds a camera.<br />
</strong><br />
<strong>Tish:</strong> <a href="http://www.cc.gatech.edu/~blair/home.html" target="_blank"> Blair MacIntyre</a> mentioned, &#8220;The problem with the mobile phone as an AR device is a problem of awareness,&#8221; i.e., you have to have a way of letting people know when there&#8217;s something interesting wherever they are. One of the issues regarding this is if you get too many alerts, then you tune them out.</p>
<p><strong>Ori: First of all Blair is one of the people in academia that get it. Because he looks at it from an experience perspective. Not just as an interesting technical problem to solve. Let&#8217;s start with getting people to enjoy this new experience. The AR demos so far were mostly eye candies, and mostly for advertising &#8211; the<a href="http://ge.ecomagination.com/smartgrid/#/landing_page" target="_blank"> GE AR ad</a> created a lot of buzz; but you look at it for 10 seconds and you forget about it.Â  You need to build something that people would want to experience over time and would be willing to pay for. I think that&#8217;s the big test, right?</strong></p>
<p><strong>Now in terms of having a ubiquitous experience where you&#8217;re continously connected, it doesn&#8217;t have to be an overwhelming experience. Just like some of the social media tools we&#8217;re using today, we decide when to connect, and we filter out the trash. You could get alerts only for things that really matter to you, not for everything that happens in your immediate environment. </strong></p>
<p><strong>There will be many layers of information, and it&#8217;ll be up to you to pick the ones you want to experience. The real benefit is that you get the information in your own field of view and in context of where you are or what you do.</strong></p>
<p><strong>Tish:</strong> So what are you working on these days?</p>
<p><strong>Ori: We are working on a little app that targets a very different audience than what you&#8217;d expect: preschoolers. We think we can encourage them to get away from a PC or TV screen and learn something while playing &#8211; in the real world. You&#8217;ll hear more about it as soon as this summer. Nuff said.</strong></p>
<p><strong>But, it is a small application that will run on the iPhone. People ask how many pre-schoolers own iPhones? Well, their parents do. </strong></p>
<p><strong>Tish:</strong> Yes there are certainly many New York kids with iPhones &#8211; my kid now has my old iphone.Â  He has pretty much switched from playing games on his DS to the iPhone. I noticed in your WARM video you place a big emphasis on AR as something that will get kids away from screens and engaged with reality.Â  This is something parents will approve of!</p>
<p><strong>Ori: Yes I saw something really interesting at my kids&#8217; party one day; they were all sitting around the room &#8211; looking down at their own DS screens. You could play the DS anywhere, but kids would usually play it on the sofa, looking at the screen, isolated from the world. With an iPhone and a camera, and the application we&#8217;re producing, reality becomes part of the game. Yes that makes it all of a sudden much more interesting for parents. Because kids are spending so much time in front of the screen, all of a sudden there&#8217;s something that will encourage them to interact with real objects, real things. Every parent I&#8217;ve talked to loves that idea.</strong></p>
<p><strong>Tish:</strong> Yes that is what is cool about the work of <a href="http://www.katilondon.com/" target="_blank">Kati London</a> &#8211; I think I saw someone say this on Twitter, &#8220;Kati puts the computer in the game not the game in the computer.&#8221;</p>
<p><strong>Ori: Yes, kids are spending more time in front of games and the computer because it&#8217;s more interesting. It captivates them with &#8220;<a id="x_z0" title="game pleasures" href="http://8kindsoffun.com/">game pleasures</a> &#8221; that tap into their brain&#8217;s dopamine circuitry &#8211; constantly seeking reward and satisfaction. So you&#8217;re not going to be able to tell them to go back to playing in reality without these pleasures. We have to study these mechanics from games and bring them into reality. It&#8217;s about programming real life; and augmented reality helps you achieve that.</strong></p>
<p><strong>Here&#8217;s an example: cause and effect; in a game when you do something you always get an immediate effect. You&#8217;re good, you get a reward. You&#8217;re not good, you get a cue to improve. In real life you do things and you could wait 2 or 3 years until you actually get feedback (if you&#8217;re lucky). Augmented Reality allows you to bring these mechanics into the real world. I think that&#8217;s going to help kids rediscover reality, in a new sense, which is what every parent is dreaming about.</strong></p>
<p><strong>Tish:</strong> I don&#8217;t know how much you can say about your app. But in regard to doing augmented reality on the iPhone.. there&#8217;s no compass. Is this a limitation?</p>
<p><strong>Ori: True, no compass yet. But the camera gives you a lot of information that you can interact with. When you run the application, you see the world in front of you, and if the app can recognize real life objects &#8211; it can put virtual elements on top of it.</strong></p>
<p><strong>Tish:</strong> But not with any accuracy unless you&#8217;re using markers. Are you using markers?</p>
<p><strong>Or</strong><strong>i: We&#8217;re using natural feature recognition. It doesn&#8217;t have to be an ugly looking marker. It can be any image.</strong></p>
<p><strong>Tish:</strong> So you&#8217;re using image recognition. Are you working with one of these image recognition startup companies (<a id="nws6" title="list here" href="http://www.educatingsilicon.com/2008/11/25/a-round-up-of-mobile-visual-search-companies/" target="_blank">list here</a> )?</p>
<p><strong>Ori: We&#8217;re working with one of those. What&#8217;s unique about it is it runs very nicely on any cell phone, and on the iPhone it works the best. For this first app, it doesn&#8217;t really matter where you are physically; the geolocation is not part of the experience. </strong><span style="background-color: #ffff00;"><br />
<strong><br style="background-color: #ffffff;" /></strong><span style="background-color: #ffffff;"><strong>Tish: </strong> For a truly engaging AR experience we will need more of a backend than is currently available?</span><br />
</span><br />
<strong>Ori: I call the backend the cloud, where you have all this information and ways to access it from anywhere. Actually I think it&#8217;s become pretty mature today. If you look at the different elements required to enable an augmented reality experience to work, you have &#8211; first &#8211; the user whose always in the center. Then you have the lens. The lens can be an iPhone, or glasses, even a projector. The lens allows you to watch, sense and track information in the real world: people, places, things. Then in the backend you have the cloud where you store and retrieve information.</strong></p>
<p><strong>So if you look at the maturity of these different elements, I think the cloud is in pretty good shape. Because there&#8217;s so much information we&#8217;re collecting and storing. Anything from Google, Wikipedia, Facebook, all that kind of stuff, it&#8217;s a lot of useful information you can access from anywhere using APIs. And a lot of it is also starting to include geolocation information. Take <a id="zhag" title="Loopt" href="http://www.loopt.com/" target="_blank">Loopt</a> or Google&#8217;s <a href="http://www.google.com/latitude/intro.html" target="_blank">friends service</a> that allows you to see where your friends are and what they&#8217;re doing. There&#8217;s tons of information out there and it&#8217;s pretty easy to access it. Now what do you do with it is the question?</strong></p>
<p><strong><a href="http://www.mobilizy.com/wikitude.php" target="_blank">Wikitude</a> is such a simple and brilliant application and nobody thought about doing it until this guy from Salzburg did. It doesn&#8217;t have any sophisticated visual tracking. It knows your position and it&#8217;s simply looking at the angle you&#8217;re pointing to. Based on these parameters it brings information from Wikipedia that pertains to your field of view. So most of it was already there. It&#8217;s just a matter of connecting the pieces in an experience that is valuable for people.</strong></p>
<p><strong>Tish: </strong>It is the uptake of even a very simple technology that puts the magic in it.</p>
<p><strong>Ori:Â  Yes, take Twitter. If you go to its homepage it looks like a very simple boring app but it is something that is both enjoyable and very useful to people.</strong></p>
<h3><strong>Why you should participate in ISMAR 2009</strong></h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-40.png"><img class="alignnone size-medium wp-image-3478" title="picture-40" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-40-222x300.png" alt="picture-40" width="222" height="300" /></a><br />
<strong>Tish: </strong>I know that you are involved in organizingÂ  <a id="seky" title="ISMAR" href="http://www.ismar09.org/" target="_blank">ISMAR</a> (picture above from Ori&#8217;s post on <a href="http://gamesalfresco.com/2009/02/23/ismar-2009-the-worlds-best-augmented-reality-event-wants-you-to-contribute/" target="_blank">&#8220;ISMAR 2009: The World&#8217;s Best Augmented Reality Event&#8230;,</a>&#8220;) and there is a call out for papers and for volunteers, can you tell me more about it?</p>
<p><strong>Ori: Yes, we hope to have the first ISMAR where we practice what we have just discussed: let&#8217;s build on all the research invested so far and instead of thinking only about 5-10 years from now, let&#8217;s see what we can do today. So we are bringing people in from other disciplines &#8211; artists, interactive media developers and people from the entertainment industry.Â  The goal is to use the technology to make something interesting for people &#8211; again, something that people would buy, and making it commercially successful.Â  Many people either don&#8217;t know about ISMAR because in the past it was a pure engineering-orientated event and peopleÂ  from a commercial perspective of AR weren&#8217;t attracted to it.Â  The Chair of the Event this year is based in Florida and he is going to bring in a lot of people from the entertainment industry such as Disney. I think this will transform this event into something more like SIGGRAPH &#8211; more of an industry event.Â  As one of the organizers of the interactive media track we are trying to bring in people that want to build applications for consumers.</strong></p>
<p><strong>Tish:</strong> In terms of AR applications what are the flagships today?</p>
<p><strong>Ori: There are very few because it&#8217;s just the beginning. There&#8217;s one tiny studio in France called <a id="z1ln" title="Int 13" href="http://www.int13.net/en/" target="_blank">Int 13</a>. They&#8217;ve created maybe the first commercial game running on a mobile device using AR technology. It&#8217;s called <a href="http://www.youtube.com/watch?v=Te9gj22M_aU" target="_blank">Kweekies</a>. It was one of the contenders for the Nokia Mobile innovation awards. They were one of the ten finalists, but they didn&#8217;t win it. It looks really cool. It&#8217;s something that runs on your desk, with a marker. Many AR folks say markers are the past, markers are ugly. But it&#8217;s still a cool experience. I think people will go for it.</strong></p>
<p><strong>Tish:</strong> Yes I think we will have to look to small companies that are free to think creatively to lead the way.Â  It seems many games companies are tied up pulling off huge big budget projects and enterprise is still catching up on how to use social media!</p>
<p><strong>Ori: Yes, last year I was in the game development conference (GDC); there was no mention of augmented reality &#8211; not on the exhibition floor, none of the sessions, nobody talked about it. I was stunned. Then this year, there was a little change. There were like three demos on the exhibition floor, <a href="http://www.metaio.com/" target="_blank">Metaio,</a> <a href="http://www.vuzix.com/home/index.html" target="_blank">Vuzix</a> and a Dutch company called <a href="http://www.augmented-reality-games.com/" target="_blank">Beyond Reality</a>. And then there was Blair&#8217;s talk, which was very very cool. The room was packed with people. And after the talk there were dozens of people lining up to talk with him about the topic. There was definitely interest, but still on the very edge. The video game industry is still a hit driven business and publishers spend upward of 20-30 million dollars to create the best AAA game possible. They just can&#8217;t take the risk. So it&#8217;s going to come from smaller companies, from outsiders coming in with a vision and understanding on how to put the AR pieces together to create a totally new experience.</strong></p>
<p><strong>Tish:</strong> But the basic tool set is there isn&#8217;t it?</p>
<p><strong>Ori: I talked to some folks at the games developer conference, many folks with MMO background, and they have great ideas about AR. It&#8217;s great to see different people with different views on what&#8217;s needed first. &#8220;Joe the Programmer&#8221; had this idea of creating a small piece of hardware that you can put in every house and provide accurate geospatial information in your home. That could open up many opportunities for AR experiences in homes.</strong></p>
<p><strong>Tish:</strong> Don&#8217;t you think we have enormous resources in terms of image databases that provide a great basis for augmented reality.Â  I was talking to Aaron Cope at ETech about <a href="http://code.flickr.com/blog/2008/10/30/the-shape-of-alpha/" target="_blank">The Shape of Alpha</a> &#8211; Flickr&#8217;s vernacular mapping project using all the geotagged photos in Flickr. That is such cool project. <a href="http://en.oreilly.com/where2009/public/schedule/speaker/43824" target="_blank">Aaron will be speaking at Where 2.0</a> also.</p>
<p><strong>Ori: Think of Google Earth. Google Earth leveraged communities to basically map all the major cities around the world into 3D models. And that is an essential step to be able to do augmented reality outdoors. Because if you had to model everything from scratch, it wouldn&#8217;t be realistic.</strong></p>
<h3><strong>Augmented Reality and Becoming Greener.</strong></h3>
<p><strong>Tish:</strong> I am really interested in how AR interfaces might be useful to some of the emerging energy identity/metering projects like <a href="http://www.amee.com/" target="_blank">AMEE</a> and <a href="http://www.wattzon.com/" target="_blank">WATTZON</a> because I think it is very important that people have very intuitive, immediate, and enjoyable ways to relate to energy data so they can make greener choices.</p>
<p><strong>Ori: Back in the day I had an idea to build an Augmented Reality application to become greener. You look at things around your home with the camera and it recognizes its greenhouse gas footprint and makes recommendations to reduce it. I guess it was a bit too early to do that based on visual recognition alone&#8230;you&#8217;d need additional sensors that would provide related information about what you are looking at.</strong></p>
<p><strong>Tish:</strong> Well as there is more interest in Green technology do you think we may see VC interest in some green AR projects now?</p>
<p><strong>Ori: I talked to some of the investment folks, Angels as well as VC&#8217;s about AR and they had no clue what it is. There&#8217;s a need for a whole lot of education. And there are no proof points (as in successful investments in this domain), and counter to popular belief &#8211; they don&#8217;t like risk so much&#8230;</strong></p>
<p><strong>Tish:</strong> And consumer adoption must lead the way, right?</p>
<p><strong>Ori: Just like with every emerging technology in history, people never bought the technology, they bought the content, the apps, the benefits that came on top of the technology. Whether it was VHS winning over Betamax, or Blu-ray winning over HD DVD. It&#8217;s always because of more/better content. Look at the video game console war: Xbox and Nintendo did better than Sony just because they had more and better games. Even Windows was a success thanks to its applications. People bought it for the applications not the OS. The content is the first to drive demand.</strong></p>
<p><strong>Tish:</strong> One of the challenges to giving people new ways to relate to their energy consumption is that you can just have them looking at graphs of how bad they have been in the past &#8211; that may make them feel bad but that doesn&#8217;t necessarily give them ways or motivation to change. There perhaps needs to be a more immediate relationship to the data to facilitate change. I think the mantra for optimization of anything from energy usage to supply chains is timely, actionable data?</p>
<p><strong>Ori: There are a lot of ideas about measuring information and displaying it to people. For example, the Prius hybrid car, one of its interesting features &#8211; which is kind of game like &#8211; is a constant display of your current fuel consumption. That alone changes how people drive because they try to beat the &#8220;Score&#8221; and as a result conserve more fuel. That model can be applied to our homes&#8230;</strong></p>
<p>Tish: Yes that is something I am very interested in. I have been following several projects in this area &#8211; one of my favorites is the <a href="http://www.arduino.cc/" target="_blank">Arduino</a>, <a href="http://www.currentcost.com/" target="_blank">Current Cost</a>/<a href="http://www.ladyada.net/make/tweetawatt/" target="_blank">Tweetawatt</a>, <a href="http://www.pachube.com/" target="_blank">Pachube</a> integrations <a href="http://www.ugotrade.com/2009/04/24/homecamp-2-home-energy-management-and-distributed-sustainability/" target="_blank">I saw at Homecamp</a>.</p>
<p>You joined a start up with Shai Agassi which was bought out by SAP right? He has a brilliant approach with Better Place.</p>
<p><strong>Ori: I think what&#8217;s really unique about Better Place&#8217;s approach is that he doesn&#8217;t require people to change their behavior. People are still going to have their own cars. They&#8217;ll be able to drive as far as they want, and for the same (or lower cost). It&#8217;s not necessarily about a new technology, electric cars have been around for a long time but there was no way people were going to be limited by the 50 or 70 mile range and Better Place is solving that problem. With its infrastructure of charging spots and battery switching stations, drivers are going to be able to drive anywhere. And it&#8217;ll be similar to having to stop once in a while to refuel your car. The price may be even lower than what you pay today for your transportation needs &#8211; and you&#8217;ll stop generating greenhouse gases. It&#8217;s a clever way of taking technology to a whole new level without changing the behavior of people.</strong></p>
<p><strong>Tish: </strong>Better Place is a classic example of things as a service isn&#8217;t it? It is basically a utility company.</p>
<p><strong>Ori: It is similar to a phone carrier model. You pay for a membership that gives you access to the car (equivalent to the phone) and electricity (equivalent to the phone line) for the same price of fuel cost today. And as a bonus you get to save the world.</strong></p>
<h3><strong>How the iPhone changed the game for AR &#8211; and the iPhone versus Android</strong></h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-38.png"><img class="alignnone size-medium wp-image-3472" title="picture-38" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/picture-38-300x198.png" alt="picture-38" width="300" height="198" /></a><em></em></p>
<p><em>Picture from Ori&#8217;s post</em><strong><em>, <a href="http://gamesalfresco.com/2009/03/23/gdc-2009-why-the-iphone-just-changed-everything/" target="_blank">&#8220;GDC 2009: Why the iphone changed everything&#8221; </a></em></strong></p>
<p><strong>Ori: And back to AR, you have to take the same approach, because nobody wants to don those huge head mounted displays or backpacks. You have to take advantage of people&#8217;s current behavior: they already carry their iPhones or similar devices.</strong></p>
<p><strong>Tish:</strong> As we discussed, you just have to get people raising up their phones and looking through them when that is a useful thing to do. Both Wikitude and Nathan Freitas&#8217;s graffiti app were enough to get me interested in the evolutionary step of raising my phone! Nathan&#8217;s graffiti app is nice. You leave a marker for your graffiti so other people can find and view/add their own &#8211; a nice primal experience like pissing on the lamp post to let your pack know where you&#8217;ve been. Also the graffiti app taps into a long history of NYC street culture around tagging and graffiti art (see my interview, <a href="http://www.ugotrade.com/2009/01/17/is-it-%E2%80%9Comg-finally%E2%80%9D-for-augmented-reality-interview-with-robert-rice/" target="_blank">&#8220;Is it OMG finally for Augmented Reality?&#8221;</a>).</p>
<p><strong>Ori: The app store has fundamentally changed the mobile gaming industry. Last year they were in shambles. There was no growth. Everybody was complaining, &#8220;we can&#8217;t handle it, there&#8217;s a million phones, and you have to test it on each phone. And carriers suck, they don&#8217;t care about sharing and promoting your content.&#8221; Everything was bad. This year mobile gaming is the hottest thing. And it&#8217;s all because of the iPhone. It changed the game.</strong></p>
<p><strong>Tish: </strong>How do you think Android is going to get traction against the iPhone?</p>
<p><strong>Ori: Well the number one thing is the form factor &#8211; the iPhone is just much cooler than the G1. It&#8217;s OK but it doesn&#8217;t have the same feel. People thought it was going to be easy to clone the iPhone but none of the attempts succeeded so far.</strong></p>
<p><strong>Tish: </strong>How much does it matter for AR not being able to run things persistently in the background on the iPhone?</p>
<p><strong>Ori: Actually they have added such a capability in OS 3. You can now make use of a background service.</strong></p>
<p><strong>Tish:</strong> OS 3 will open up new possibilities for AR?<strong> </strong></p>
<p><strong>Ori: The access to the video API is still not public. But there is a new Microsoft application &#8211; Microsoft Tag that makes use of that API which means it is probably OK to use it.</strong></p>
<p><strong>Tish: </strong>(I ask Ori for his card and he shows me how to read it with my iPhone.) Oh nice you have an AR card, of course!</p>
<h3><strong>In Search of Pong for Augmented Reality</strong></h3>
<p><strong>Tish: </strong>So how will AR begin to, as Blair&#8217;s friend puts it, &#8220;facilitate a killer existence,&#8221; particularly as we are probably looking at some new and perhaps pricey hardware?</p>
<p><strong>Ori: You could take the Better Place approach. We&#8217;re going to give you a great experience and we&#8217;ll include the devices as part of that experience for the same price. Let&#8217;s say you subscribe to an AR experience which offers access to multiuser support, and all the information you need wherever you go &#8211; exactly according to the vision. You pay for a subscription on a monthly basis and included in that cost we give you a better device that offers a better AR experience. It&#8217;s following the phone carrier approach, but in a good way.</strong></p>
<p><strong>But first of all we do need our Pong! I was sitting with a couple of AR game enthusiasts at the GDC and we were asking ourselves, &#8220;how do we create the first pong for AR?&#8221;</strong></p>
<p><strong>Was Pong a multiplayer game? Not necessarily! Did it connect to the network? No! We have to create the first dot in a long line of dots that will bring us to our destination.</strong></p>
<p><strong>Tish: </strong>You haven&#8217;t seen a Pong yet have you?</p>
<p><strong>Ori: Not yet. I mean there&#8217;s maybe a handful of games and apps out there, but I don&#8217;t think any of them is a Pong yet. Still, it&#8217;s getting closer.</strong></p>
<p><strong>Tish: </strong>Kati London is doing some very interesting work on bringing games into reality, isn&#8217;t she?</p>
<p><strong>Ori: Yes, she works with Frank Lantz at <a href="http://playareacode.com/" target="_blank">Area/Code</a>. He teaches at NYU and has designed games for the <a href="http://www.comeoutandplay.org/" target="_blank">&#8220;Come Out and Play&#8221;</a> festival here in Manhattan. And a lot of these games are actually low tech.</strong></p>
<p><strong>Tish:</strong> Yes I have a big alternate reality game blog brewing that I haven&#8217;t had time to write yet!</p>
<p><strong>Ori: &#8220;The city is the gameboard&#8221; is their slogan. It&#8217;s going to be a great playground for AR games. The city becomes a theme park. The city could become an even bigger touristic attraction. People will come to the city to be part of these games. So you&#8217;re having thousands of people running around the city playing all sorts of games from laser-tag style to history adventures, to treasure hunts.</strong></p>
<h3><strong>Composing Reality</strong></h3>
<p><strong>Tish: </strong>So why haven&#8217;t you focused on one of these kinds of games with your company?</p>
<p><strong>Ori: We have a couple of scenarios along these lines that we&#8217;re planning for 2010-11. But first focus on what&#8217;s possible today.</strong></p>
<p><strong>Tish: </strong>And what&#8217;s stopping you from doing those kind of games today?</p>
<p><strong>Ori: Many things. The devices are not there yet, location services are not accurate enough, ubiquitous sensors are not there yet.</strong></p>
<p><strong>Tish: </strong>You think alternate reality gaming needs more &#8220;ubiquity&#8221; than is currently available?</p>
<p><strong>Ori: Not necessarily. People are doing alternate reality games with no &#8220;ubiquity&#8221; at all. But my interest is to add the visual aspect. I believe humans are mostly driven visually.</strong></p>
<p><strong>Jane McGonigal said in a talk at GDC, that AR would allow us to program reality, which is exactly how I look at it. Once you can recognize things, some of it with WiFi and RFID and all sorts of sensors. But visual sensors is always going to be the ultimate way to recognize things. And once you recognize things and know what they are, and can pull information about those things (or people and places) from the internet, you can program it (visually). You could program it to be fictional, like in a video game, or it could be programmed as non-fictional, like a documentary. And that allows you to do things that before were unimaginable.</strong></p>
<p><strong>Tish: </strong>But you can&#8217;t forget the visual, it is primary the connection to peoples&#8217; primary sensory relationships.</p>
<p><strong>Ori: Yes, it&#8217;s like you go to a grocery store and you pick your vegetables, a lot of it is by sight and by touch. And what if you could also see just by looking at it that it&#8217;s from a local store, and that it&#8217;s organic?</strong></p>
<p><strong>Tish:</strong> It goes beyond overlays really?</p>
<p><strong>Ori: By the way, I don&#8217;t like the term &#8216;overlay&#8217;. I know that&#8217;s how it looks: you either overlay or superimpose, but I&#8217;m still searching for a better term. A term I prefer to use is &#8220;composing reality&#8221;. Just like painters, they use brushstrokes and colors and compose a painting. We need to take the real element and the virtual element and compose them into something new. It&#8217;s not just about slapping one on top of the other.</strong></p>
<p><strong>Tish: </strong>yes I think the idea of dashboards is not so appealing.</p>
<h3><strong>Pookatak Games</strong></h3>
<p><strong>Tish: </strong>Do you want to explain the evolution of your company? You have an interesting history of success with high end enterprise applications.</p>
<p><strong>Ori: Since I was a kid I wanted to invent and create things. When I discovered software, that was a really cool way of actually creating things from nothing. From thin air; and you can do it very quickly. That&#8217;s what brought me into software. But I was always looking for the intersection between technology and art. Looking for ways to bring these things together. In the early nineties virtual reality was doing it. It had the appeal of cutting edge technology that can be combined with art. But then, as we all know, it crashed. So I joined Shai Agassi&#8217;s startup (who is now doing Better Place) back in the early nineties. I was one of the first employees in his startup which was developing multimedia products. I was leading the development of one of its flagship products. At some point we realized the technology could be great for an enterprise environment.</strong></p>
<p><strong>It was a really great experience. First going through this cycle from a very small startup and growing into this multi billion dollar business. I was responsible for defining and marketing SAP&#8217;s platform, which was called Netweaver. It was just an idea when we joined SAP and by the time I left it was a major, major business for SAP. I learned about the challenges of building a platform. No matter what purpose you&#8217;re building it for, it typically has similar rules. It&#8217;s definitely not just about the technology; the content that comes with it is really key to making a platform successful.</strong></p>
<p><strong>The third part of this platform trifecta is the community. If you don&#8217;t build a community, you won&#8217;t get the critical mass required for adoption. It may be your own platform but it&#8217;s not necessarily the people&#8217;s platform. That experience is very key to what we&#8217;re doing today. Now, a new industry is being born on the basis of a remarkable technology. But to drive adoption, first we&#8217;ll need good content. The content will be created using today&#8217;s technology with internal tools developed to simplify the process. Next step would be to make the tools used internally &#8211; available to other developers. Help scale the industry, enable innovation on a larger scale. That way we have a chance to create a platform. So it isn&#8217;t really just about my company. I&#8217;m so passionate about augmented reality, I want it to become a healthy and successful industry for the next 5, 10, 15 years.</strong></p>
<p><strong>Tish: </strong>Yes I am so ready to be liberated from the sitting behind a computing screen! And I know that all this hardware is murdering the environment.</p>
<p><strong>Ori: There&#8217;s the book by Rolf Hainich which is called &#8220;<a id="ba8p" title="The End Of Hardware" href="http://www.theendofhardware.com/">The End Of Hardware</a>.&#8221; It&#8217;s about hardware for augmented-reality. Once you use goggles or other AR interfaces you eliminate the need for screens, laptops, etc. It&#8217;s going to be great for the environment. You have read Rainbow&#8217;s End, right? According to the book in a few years there will barely be any (visible) hardware. At least it&#8217;ll have a much smaller footprint for the environment. And it&#8217;ll touch every aspect of life, everything you do. It&#8217;ll change the way you interact with the world.</strong></p>
<h3><strong>The Elusive Eyewear for Immersive AR.</strong></h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/retroar-googlespost.jpg"><img class="alignnone size-medium wp-image-3469" title="retroar-googlespost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/05/retroar-googlespost-300x225.jpg" alt="retroar-googlespost" width="300" height="225" /></a><br />
<em>Friend of Ori&#8217;s in San Francisco wearing retro AR goggles (from <a href="http://gamesalfresco.com/2009/05/04/gdc-2009-roundup-a-tiny-spark-of-augmented-reality/" target="_blank">Games Alfresco, Ori&#8217;s roundup of GDC 2009</a>)</em></p>
<p><strong>Tish:</strong> OK let&#8217;s talk about goggles.</p>
<p><strong><strong>Ori: Goggles are going to happen, we want to be hands free.</strong></strong></p>
<p><strong>It&#8217;s going to happen because it&#8217;s just a more intuitive way to use this technology. But above all it has to look cool. Because if it&#8217;s not, if it&#8217;s a big headset, then maybe a small percent of the population might use it, but most people won&#8217;t. It has to look like an accessory, like new cool eyeglasses that you just must wear.</strong></p>
<p><strong>I recently talked to a friend, who runs an industrial design firm, and has experience in designing such glasses for companies like Microvision and Lumus. He says that when you try to bring the images so close to our eyes &#8211; there are some really hard problems to solve. Otherwise it can become really annoying and cause dizziness.</strong></p>
<p><strong>But I&#8217;m optimistic. I believe it&#8217;s going to happen 3 to 5 years from now. It&#8217;s already starting now: Vuzix announced goggles that will be available this year. Some AR apps are going to take advantage of it next year. Initially only a fraction of the population will use it. And that&#8217;s going to help advance it and make it better and better. But it&#8217;s going to take time until it reaches the mass market.</strong></p>
<p><strong>Tish:</strong> In virtual worlds we have seen, I think, a lot of mistakes in terms of reinventing the wheel and producing too many proprietary versions of the same thing and not enough concerted effort on standards and open platforms that could create a vibrant ecosystem.Â  How can augmented reality not make the same mistakes?</p>
<p><strong>Ori: There are some early AR open source efforts ARToolKit and ARTag but it is not a movement yet. One of the things we&#8217;re trying to do at ISMAR this year is to put together discussions around key industry issues, such as standards. Some people say it&#8217;s too early, you have to have a de facto standard to start from. But pretty soon it&#8217;s going to be too late. Just like with virtual worlds, all of a sudden you have all these islands that don&#8217;t talk to each other. Why get to that point if we can plan to avoid it? Let&#8217;s start thinking about it right now. On the other front there are devices. There are pockets of people working on adapting devices for AR, second guessing the hardware companies. Why not get them together with the Intels and Nvidias of the world, and discuss what this device should be able to do. And then compete to make it happen.</strong></p>
<p><strong>Tish: </strong>How much luck are you having with this discussion part?</p>
<p><strong>Ori: People are very interested in doing this. We proposed these panels for ISMAR. And I&#8217;ve got some key people already on board. They have tons of input, they want to get involved. We&#8217;ll see how much we can actually get out of it.</strong></p>
<p><strong>Tish: </strong>In virtual worlds it was a while before vibrant opensource communities developed.Â  OpenSim has I think been the breakthrough community in this regard.</p>
<p><strong>Ori: You have to think about the elements up front. The dream job is to architect the industry. Say we agree on the required pieces. Then we could help the right companies succeed in delivering the pieces. Next, we have to collaborate so that these pieces talk to each other. And eventually these communication methods will become de facto standards and most developers will adopt it.</strong></p>
<p><strong>Tish: </strong>So I&#8217;m going to put you in the role. You&#8217;ve got your dream job. You&#8217;re going to architect this community. So what are the key pieces and where would you like to see the open source communities take hold first?</p>
<p><strong>Ori: Open source will not be exclusive. It&#8217;s going to live side by side with proprietary technology.</strong></p>
<p><strong>The key pieces? You have the user at the center. And the user interacts with a lens. The lens includes both the hardware and the software. And then the lens senses and interacts with the world, which includes people, things and places. And these people-things-places emit information &#8211; about who they are, where they are, what they&#8217;re doing, etcÂ  &#8211; which is then stored in the cloud.</strong></p>
<p><strong>And then you have the content providers, the people and companies, composers who weave AR experiences through the pieces we mentioned before. These composers need a platform that glues these pieces together. Pieces of the platform will be on the lens, and in the world, and in the cloud. If you manage to remove the frictions, and connect these pieces into an experience that people like &#8211; then you have a platform. What the platform does is reduce the overhead and accelerate innovation.</strong></p>
<p><strong>Tish: </strong>Another problem virtual worlds faced in their development was their isolation from the world wide web.Â  Will augmented reality avoid this plight?</p>
<p><strong>Ori:Â  Yes, I believe the key, like you said before, is not to reinvent the wheel. The cloud is already there.Â  Take Wikitude for example, all <a href="http://www.mobilizy.com/" target="_blank">Mobilizy</a> had to do is buildÂ  a relatively simple client app, connected to wikipedia, and all of a sudden it offered a wealth of information in your field of view.</strong></p>
<p><strong>I think we can learn a lot from web 2.0. For example, in order to have a ubiquitous experience like <a href="http://www.curiousraven.com/" target="_blank">Robert Rice</a> and others are striving for, you&#8217;ll need to 3d map the world. Google earth like apps are going to help but it is not going to be sufficient. So let&#8217;s leverage people. Google became successful in part by making people work with them.Â  Each time you create a link from your blog to my blog their search engines learn from it.Â  So let&#8217;s find ways to make people create information that can be used for AR.</strong></p>
<p><object width="425" height="344" data="http://www.youtube.com/v/GTXtW3W8mzQ&amp;hl=en&amp;fs=1" type="application/x-shockwave-flash"><param name="allowFullScreen" value="true" /><param name="allowscriptaccess" value="always" /><param name="src" value="http://www.youtube.com/v/GTXtW3W8mzQ&amp;hl=en&amp;fs=1" /><param name="allowfullscreen" value="true" /></object></p>
<p><em>Ori Inbar directed <a title="Wiki Mouse" href="http://www.youtube.com/watch?v=GTXtW3W8mzQ" target="_blank">Wiki Mouse</a> &#8211; a WIKI Film co-created by a swarm of movie makers around the world.</em></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/05/06/composing-reality-and-bringing-games-into-life-talking-with-ori-inbar-about-mobile-augmented-reality/feed/</wfw:commentRss>
		<slash:comments>12</slash:comments>
		</item>
		<item>
		<title>HomeCamp 2: Home Energy Management and Distributed Sustainability</title>
		<link>http://www.ugotrade.com/2009/04/24/homecamp-2-home-energy-management-and-distributed-sustainability/</link>
		<comments>http://www.ugotrade.com/2009/04/24/homecamp-2-home-energy-management-and-distributed-sustainability/#comments</comments>
		<pubDate>Fri, 24 Apr 2009 19:14:16 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Bar Camp]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[culture of participation]]></category>
		<category><![CDATA[CurrentCost]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Energy Saving]]></category>
		<category><![CDATA[home automation]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[HomeCamp]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[mirror worlds]]></category>
		<category><![CDATA[OpenSim]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[Virtual HomeCamp]]></category>
		<category><![CDATA[Virtual Meters]]></category>
		<category><![CDATA[Virtual Worlds]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[Add new tag]]></category>
		<category><![CDATA[distributed sustainability]]></category>
		<category><![CDATA[electricity 2.0.]]></category>
		<category><![CDATA[green technology]]></category>
		<category><![CDATA[home energy management]]></category>
		<category><![CDATA[intelligent energy management]]></category>
		<category><![CDATA[living greener]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[sustainable interaction design]]></category>
		<category><![CDATA[TweetaWatt]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3423</guid>
		<description><![CDATA[HomeCamp is a home hacking, automation and green technology community that will be gathering in London tomorrow, Saturday 25th April 2009, 10am until 6pm BST (GMT + 1), and in an OpenSim event running alongside for virtual participation, to brainstorm new possibilities for distributed sustainability, creative smart meters, monitoring, graphing and visualizing energy usage. More [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-31.png"><img class="alignnone size-medium wp-image-3424" title="picture-31" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-31-299x300.png" alt="picture-31" width="299" height="300" /></a></p>
<p><a rel="nofollow" href="http://homecamp.org.uk/">HomeCamp</a> is a home hacking, automation and green technology community that will be <a href="http://maps.google.co.uk/maps?f=q&amp;source=s_q&amp;hl=en&amp;geocode=&amp;q=65+-+71+Scrutton+Street,+London,+EC2A+4PJ&amp;sll=51.509912,-0.129361&amp;sspn=0.100214,0.30899&amp;ie=UTF8&amp;ll=51.524379,-0.080895&amp;spn=0.006582,0.019312&amp;z=16&amp;iwloc=addr" target="_blank">gathering in London</a> tomorrow, Saturday 25th April 2009, 10am until 6pm BST (GMT + 1), and in an <a href="http://homecamp.pbwiki.com/Virtual-Home-Camp">OpenSim event running alongside for virtual participation</a>, to brainstorm new possibilities for distributed sustainability, creative smart meters, monitoring, graphing and visualizing energy usage.</p>
<p class="MsoNormal">More details and videos on the <a href="http://homecamp.org.uk" target="_blank">blog.</a> <a href="http://homecamp.pbwiki.com/" target="_blank">The wiki, which includes signup</a>, is the main portal to all the online activity.<a href="http://homecamp.pbwiki.com/"></a></p>
<p>As James Governor notes <a href="http://www.redmonk.com/jgovernor/2009/04/24/homecamp-returns/" target="_blank">here</a>:</p>
<blockquote><p><span lang="EN-GB">there has been a huge amount of code and applications released focused purely on using technology for home energy monitoring and automation.Â  We have an active google group and quite a few videos and content showcasing the various applications and hardware currently being used by geeks to save money and live greener.</span></p></blockquote>
<p><span lang="EN-GB">Now the challenge is to see how this seedling home energy management movement</span><span lang="EN-GB"> can </span><span lang="EN-GB">really grow into widely adopted distributed sustainability solutions that </span><span lang="EN-GB">everyone can use, and participate in.</span></p>
<p>Both <a href="http://www.yellowpark.net/cdalby/index.php/about/" target="_blank">Chris Dalby</a> (<a href="http://www.yellowpark.net/cdalby/index.php/2009/04/23/homecamp-2-is-this-saturday/" target="_blank">see here)</a>, <a href="http://andypiper.wordpress.com/2009/04/24/home-camp-mark-2/" target="_blank">Andy Piper</a>, James Governor of <a href="http://www.redmonk.com/jgovernor/" target="_blank">Monkchips</a> (<a href="http://www.redmonk.com/jgovernor/2009/04/24/homecamp-returns/" target="_blank">see here</a>), and Tom Raftery of <a href="http://greenmonk.net/" target="_blank">GreenMonk</a> (<a href="http://greenmonk.net/homecamp-ii/" target="_blank">see here</a>), have posted on tomorrow&#8217;s <a href="http://homecamp.pbwiki.com/" target="_blank">Ho</a><a href="http://homecamp.pbwiki.com/" target="_blank">meCamp</a> event. So I am just going to add some quick notes, especially to highlight some of what will be going on virtually for those of you, like me, who can&#8217;t make it to London.</p>
<p>You can tune in either on the live video ustream, or sign up on <a href="http://reactiongrid.com/">ReactionGrid </a>and join the <a href="http://homecamp.pbwiki.com/Virtual-Home-Camp">OpenSim event</a>. Also, you can keep up on what is happening on Twitter #homecamp. I highly recommend that you catch Tom Raftery&#8217;s talk which will be streamed from Spain live into the London meeting, the OpenSim event on ReactionGrid, and Ustream. Tom Raftery, a leading Green technology analyst at <a href="http://redmonk.com/" target="_blank">RedMonk</a> <a href="http://greenmonk.net/" target="_blank">(see also GreenMonk</a>), will be picking up, in depth, on some themes raised in his brilliant ETech 2009 presentation, <a href="http://en.oreilly.com/et2009/public/schedule/detail/5655" target="_blank">&#8220;Electricity 2.0: Applying the Lessons of the Web to Our Energy Networks.&#8221;</a></p>
<p class="MsoNormal"><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/tweetawatt.jpg"><img class="alignnone size-medium wp-image-3425" title="tweetawatt" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/tweetawatt-300x162.jpg" alt="tweetawatt" width="300" height="162" /></a></p>
<p class="MsoNormal">There will be homecampers dropping in to virtual homecamp in ReactionGrid throughout the day, including <a href="http://blogs.ipona.com/chris/" target="_blank">Chris Hart (the awesome &#8220;girl-geek&#8221;@dstrawberrygirl)</a>, <a href="http://mikethebee.mevio.com/" target="_blank">MiketheBee</a>, and <a href="http://www.cminion.com/wordpress/" target="_blank">Cminion</a>, who has a number of cool projects to demo, including <a href="http://www.cminion.com/wordpress/?p=43" target="_blank">his energy turbines</a>. <a href="http://www.gomaya.com/glyph/" target="_blank">Dave Pentecost</a> (pictured above with his <a href="http://twitter.com/tweetawatt" target="_blank">Tweetawatt</a>, <a href="http://www.pachube.com/" target="_blank">Pachube</a> Orb) and I (<a href="http://docs.google.com/Presentation?id=dhj5mk2g_214g48q37hj" target="_blank">see our presentation for EarthWeek SL here</a>) plan to be at Virtual Homecamp on ReactionGrid between 9am and 10.30am EST. Dave has done a number of cool energy monitoring hacks including a <a href="http://www.pachube.com/" target="_blank">Pachube</a> link to and from <a href="http://opensimulator.org/wiki/Main_Page" target="_blank">OpenSim</a>.</p>
<p><span class="title">Also keep your eye on Dave&#8217;s blog, <a href="http://www.gomaya.com/glyph/" target="_blank">The Daily Glyph</a>, for what&#8217;s new in distributed sustainability. Dave just posted some great links on Sustainable Interaction, design</span> and work by ITP researchers and others in sustainable use of technology.</p>
<p><a title="Sustainable Interaction | Main / Papers" href="http://itp.nyu.edu/sustainability/interaction/Main/Papers">Sustainable Interaction | Main / Papers</a></p>
<p><a title="Sustainable interaction design | Sustainable Minds" href="http://www.sustainableminds.com/category/categories/sustainable-interaction-design">Sustainable interaction design | Sustainable Minds</a></p>
<p><a title="Design For the Other 90% | Cooper-Hewitt, National Design Museum" href="http://other90.cooperhewitt.org/">Design For the Other 90% | Cooper-Hewitt, National Design Museum</a></p>
<p class="MsoNormal">If you are in London, look out for Oliver Goh of <a href="http://www.shaspa.com/" target="_blank">Shaspa</a> as Oliver will be at Homecamp in London. As I mentioned in <a href="http://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/" target="_blank">my previous post</a>, Oliver will soon be launching both Shaspa community and enterprise hardware and software packages for &#8220;Intelligent Energy Management.&#8221;</p>
<p class="MsoNormal"><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-35.png"><img class="alignnone size-medium wp-image-3428" title="picture-35" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-35-300x229.png" alt="picture-35" width="300" height="229" /></a></p>
<p>For a bit of homecamp history, James Governor (picture below from <a href="http://chinposin.com/home/monkchips" target="_blank">Chinposin)</a>, recaps some of the successes of the first HomeCamp <a href="http://www.redmonk.com/jgovernor/2009/04/24/homecamp-returns/" target="_blank">here</a>.</p>
<p>And last but not least, a big thanks to sponsors, <a href="http://currentcost.co.uk/">CurrentCost</a>, <a href="http://greenmonk.net/">Greenmonk</a>, <a href="http://www.pachube.com/">Pachube</a>, <a href="http://www.onzo.co.uk/" target="_blank">Onzo</a>, and <a href="http://reactiongrid.com/">ReactionGrid</a>, and media partner <a href="http://theattick.tv/" target="_blank">theattick.tv</a> who are making the London and virtual homecamp events possible.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-33.png"><img class="alignnone size-medium wp-image-3426" title="picture-33" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-33-294x300.png" alt="picture-33" width="294" height="300" /></a></p>
<p class="MsoNormal"><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/tweetawatt.jpg"></a></p>
<p class="MsoNormal"><a href="http://homecamp.pbwiki.com/"></a></p>
<p class="MsoNormal"></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/04/24/homecamp-2-home-energy-management-and-distributed-sustainability/feed/</wfw:commentRss>
		<slash:comments>2</slash:comments>
		</item>
		<item>
		<title>Sensor Networks and Sustainability: &#8220;Connecting Real, Virtual, Mobile and Augmented Spaces&#8221;</title>
		<link>http://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/</link>
		<comments>http://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/#comments</comments>
		<pubDate>Sun, 19 Apr 2009 06:32:59 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[3D internet]]></category>
		<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[architecture of participation]]></category>
		<category><![CDATA[Augmented Reality]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[CurrentCost]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Energy Saving]]></category>
		<category><![CDATA[home automation]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[HomeCamp]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[message brokers and sensors]]></category>
		<category><![CDATA[Mixed Reality]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[MQTT and RSMB]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[OpenSim]]></category>
		<category><![CDATA[Paticipatory Culture]]></category>
		<category><![CDATA[realXtend]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Virtual HomeCamp]]></category>
		<category><![CDATA[Virtual Meters]]></category>
		<category><![CDATA[Virtual Realities]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[arduino]]></category>
		<category><![CDATA[Carbon Goggles]]></category>
		<category><![CDATA[distributed sustainability]]></category>
		<category><![CDATA[home energy management]]></category>
		<category><![CDATA[open data]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[sensor networks]]></category>
		<category><![CDATA[sensor networks and sustainability]]></category>
		<category><![CDATA[SHASPA]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[TweetaWatt]]></category>
		<category><![CDATA[Virtual Worlds]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3381</guid>
		<description><![CDATA[Today, I did a presentation, on connecting real, virtual, mobile, and augmented spaces to support sustainability, for Earth Week SL, with Dave Pentecost and Jim Purbrick, who presented on Carbon Goggles. Dave and I focused on sensor networks, open data, Pachube, OpenSim, and sustainability from perspective of, &#8220;hack local, think global.&#8221;Â  Dave and I will [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-21.png"><img class="alignnone size-medium wp-image-3382" title="picture-21" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-21-300x225.png" alt="picture-21" width="300" height="225" /></a></p>
<p>Today, I did a presentation, on <a href="http://docs.google.com/Presentation?id=dhj5mk2g_214g48q37hj" target="_blank">connecting real, virtual, mobile, and augmented spaces to support sustainability,</a> for <a href="http://slearthweek.wordpress.com/2009/04/10/earth-week-press-release-see-schedule-also/" target="_blank">Earth Week SL</a>, with <a href="http://www.gomaya.com/glyph/" target="_blank">Dave Pentecost</a> and <a href="http://jimpurbrick.com/" target="_blank">Jim Purbrick</a>, who presented on <a href="http://carbongoggles.org/" target="_blank">Carbon Goggles</a>.</p>
<p>Dave and I focused on sensor networks, open data,<a href="http://www.pachube.com/" target="_blank"> Pachube</a>,  <a href="http://opensimulator.org/wiki/Main_Page" target="_blank">OpenSim,</a> and sustainability from the perspective of, &#8220;hack local, think global.&#8221; Dave and I will be picking up on some of these themes of sensor networks and sustainability next week in our presentation with <a href="http://www.darleon.com/" target="_blank">Dimitri Darras</a> at ITP, NYU, April 24th, 6.30 pm to 8 pm &#8211; <a href="http://itp.nyu.edu/sigs/news/special-event-open-sim/" target="_blank">details here</a>. If you are in New York City, I hope to see you there.</p>
<p>We got some interesting insights into augmented reality from <a href="http://jimpurbrick.com/" target="_blank">Jim Purbrick</a> whose <a href="http://carbongoggles.org/" target="_blank">Carbon Goggles</a> project prototypes how we can use augmented reality to read carbon identity and to combine well organized, verified data from <a href="http://www.amee.com/" target="_blank">AMEE</a> &#8211; a neutral aggregation platform to measure the &#8220;carbon footprint&#8221; of everything on earth, with crowd sourced tagging and linking.</p>
<h3>Shaspa &#8211; &#8220;the sensor network system that has it all&#8221;</h3>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-22.png"><img class="alignnone size-medium wp-image-3391" title="picture-22" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-22-300x224.png" alt="picture-22" width="300" height="224" /></a></p>
<p>We also discussed, recently launched, <a href="http://www.shaspa.com/" target="_blank">Shaspa</a>. Shaspa&#8217;s energy management packages connect spaces &#8211; real, virtual, mobile and augmented. Shaspa has been blogged by <a href="http://www.maxping.org/business/real-life/virtual-management-of-energy-consumption-in-the-home.aspx/" target="_blank">Maxping</a> and <a href="http://www.virtualworldsnews.com/2009/04/shaspa-launches-home-energy-organizer-on-opensim.html" target="_blank">Virtual World News</a>, so you can read all about it, but the Shaspa device kit won&#8217;t be available until next week. Some key features of the Home Energy package are listed on the slide above. However, this evening, Dave Pentecost and I got a sneak preview of both the Shaspa community and enterprise hardware and software packages from Shaspa founder Oliver Goh. We were pretty impressed.</p>
<p><strong>Dave:</strong> &#8220;<strong>It&#8217;s the ultimate hackable device for energy management!&#8221;</strong></p>
<p><strong>Oliver:</strong> <strong>&#8220;Bring us any sensor device &#8211; with documentation, and within three days we will put a driver into Shaspa.&#8221;</strong></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/daveandoliverpost.jpg"><img class="alignnone size-medium wp-image-3392" title="daveandoliverpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/daveandoliverpost-300x178.jpg" alt="daveandoliverpost" width="300" height="178" /></a></p>
<p>Oliver is on the right and Dave on the left in the picture above. The picture below shows Shaspa in OpenSim. Oliver and I will be attending the <a href="http://www.3dtlc.com/"><span style="color: #810081;">3D Training, Learning and Collaboration</span></a> Conference in Washington, DC, next week.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-23.png"><img class="alignnone size-medium wp-image-3412" title="picture-23" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/04/picture-23-300x208.png" alt="picture-23" width="300" height="208" /></a></p>
<h3>Links</h3>
<p>Here are some of the links that came up in the presentation as many people asked for them to be published. Dave also has them on <a href="http://www.gomaya.com/glyph/archives/002520.html#002520" target="_blank">his blog</a>.</p>
<p>SLIDES on GOOGLE DOCS:<br />
<a title="Earth Week SL Presentation, April 18th, 2009 - Google Docs" href="http://docs.google.com/Presentation?id=dhj5mk2g_214g48q37hj">Earth Week SL Presentation, April 18th, 2009 &#8211; Google Docs</a></p>
<p><a href="http://www.ugotrade.com/2009/01/28/pachube-patching-the-planet-interview-with-usman-haque/" target="_blank">Pachube, sensor networks</a></p>
<p><a href="http://www.gomaya.com/glyph" target="_blank">Dave&#8217;s blog covering Maya archaeology, jungle ecology, and technology</a></p>
<p><a href="http://www.gomaya.com/glyph/archives/001914.html" target="_blank">Maya Frontier, Usumacinta River videos</a></p>
<p><a href="http://en.wikipedia.org/wiki/Collapse_(book)" target="_blank">Collapse</a></p>
<p><a href="http://arduino.cc/" target="_blank">Arduino microcontrollers</a></p>
<p><a href="http://community.pachube.com/tutorials" target="_blank">Pachube &#8211; tutorials</a></p>
<p><a href="http://apps.pachube.com/" target="_blank">Pachube Apps </a>-</p>
<p><a href="http://www.pachube.com/feeds/1284" target="_blank">Arduino-SL-Pachube data site</a></p>
<p><a href="http://www.pachube.com/feeds/1505" target="_blank">SL to Pachube site</a></p>
<p><a href="http://www.zachhoeken.com/connecting-to-the-world" target="_blank">Dave&#8217;s Danger Shield &#8211; Pachube  tutorial</a></p>
<p><a href="http://www.ladyada.net/make/tweetawatt/" target="_blank">TweetaWatt site (LadyAda)</a></p>
<p><a href="http://www.gomaya.com/glyph/archives/002505.html" target="_blank">Dave&#8217;s post on TweetaWatt to Opensim/SL</a></p>
<p><a href="http://peterquirk.wordpress.com/2008/12/22/tutorial-using-the-streamlined-tool-chain-for-importing-sketchup-models-into-realxtend-04/" target="_blank">Peter Quirk&#8217;s post on Importing Sketchup into RealXtend</a></p>
<p><a href="http://opensimulator.org/wiki/Main_Page" target="_blank">Opensim</a></p>
<p><a href="http://www.realxtend.org/" target="_blank">RealXtend</a></p>
<p><a href="http://reactiongrid.com/" target="_blank">ReactionGrid</a></p>
<p><a href="http://homecamp.pbwiki.com/" target="_blank">homecamp</a></p>
<p><a href="http://www.cminion.com/wordpress/" target="_blank">cminion -wind turbines in OpenSim</a></p>
<p><a href="http://mikethebee.mevio.com/" target="_blank">MiketheBee</a></p>
<p><a href="http://www.ugotrade.com/2009/01/17/is-it-%E2%80%9Comg-finally%E2%80%9D-for-augmented-reality-interview-with-robert-rice/" target="_blank">Is it &#8220;OMG finally&#8221; for Augmented Reality?</a></p>
<p><a href="http://www.ugotrade.com/2008/12/15/smart-planetinterview-with-andy-stanford-clark/" target="_blank">Smart Planet: Interview with Andy Stanford-Clark</a></p>
<p><a href="http://www.orangecone.com/" target="_blank">Orange Cone &#8211; Information Shadows and Things as Services</a></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/04/19/sensor-networks-and-sustainability-connecting-real-virtual-mobile-and-augmented-reality/feed/</wfw:commentRss>
		<slash:comments>2</slash:comments>
		</item>
		<item>
		<title>Dematerializing the World, Shadows, Subscriptions and Things as Services: Talking With Mike Kuniavsky at ETech 2009</title>
		<link>http://www.ugotrade.com/2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/</link>
		<comments>http://www.ugotrade.com/2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/#comments</comments>
		<pubDate>Thu, 19 Mar 2009 03:16:11 +0000</pubDate>
		<dc:creator><![CDATA[Tish Shute]]></dc:creator>
				<category><![CDATA[Ambient Devices]]></category>
		<category><![CDATA[Ambient Displays]]></category>
		<category><![CDATA[Carbon Footprint Reduction]]></category>
		<category><![CDATA[digital public space]]></category>
		<category><![CDATA[Ecological Intelligence]]></category>
		<category><![CDATA[Energy Awareness]]></category>
		<category><![CDATA[Energy Saving]]></category>
		<category><![CDATA[home automation]]></category>
		<category><![CDATA[home energy monitoring]]></category>
		<category><![CDATA[home energy monitors]]></category>
		<category><![CDATA[Instrumenting the World]]></category>
		<category><![CDATA[internet of things]]></category>
		<category><![CDATA[mobile meets social]]></category>
		<category><![CDATA[new urbanism]]></category>
		<category><![CDATA[smart appliances]]></category>
		<category><![CDATA[Smart Devices]]></category>
		<category><![CDATA[Smart Planet]]></category>
		<category><![CDATA[sustainable living]]></category>
		<category><![CDATA[sustainable mobility]]></category>
		<category><![CDATA[ubiquitous computing]]></category>
		<category><![CDATA[Web 2.0]]></category>
		<category><![CDATA[Web Meets World]]></category>
		<category><![CDATA[World 2.0]]></category>
		<category><![CDATA[#etech]]></category>
		<category><![CDATA[Aaaron Straup Cope]]></category>
		<category><![CDATA[Adam Greenfield]]></category>
		<category><![CDATA[Ambient Orb]]></category>
		<category><![CDATA[AMEE]]></category>
		<category><![CDATA[BlinkM]]></category>
		<category><![CDATA[Bocci at ETech]]></category>
		<category><![CDATA[Bruce Sterling]]></category>
		<category><![CDATA[data shadows]]></category>
		<category><![CDATA[dematerializing products]]></category>
		<category><![CDATA[dematerializing the world]]></category>
		<category><![CDATA[dressing the shadows]]></category>
		<category><![CDATA[ecology of services]]></category>
		<category><![CDATA[econolypse]]></category>
		<category><![CDATA[embodied energy data]]></category>
		<category><![CDATA[energy identity]]></category>
		<category><![CDATA[Etech 2009]]></category>
		<category><![CDATA[Gavin Starks]]></category>
		<category><![CDATA[green technology]]></category>
		<category><![CDATA[information shadows]]></category>
		<category><![CDATA[item level identification]]></category>
		<category><![CDATA[LilyPad]]></category>
		<category><![CDATA[LoveM]]></category>
		<category><![CDATA[Maker culture]]></category>
		<category><![CDATA[Makershed]]></category>
		<category><![CDATA[Mike Kuniavsky]]></category>
		<category><![CDATA[Moore's Law]]></category>
		<category><![CDATA[Pachube]]></category>
		<category><![CDATA[Path Intelligence]]></category>
		<category><![CDATA[RFID tracking]]></category>
		<category><![CDATA[servicization of things]]></category>
		<category><![CDATA[smart LED]]></category>
		<category><![CDATA[spimes]]></category>
		<category><![CDATA[Stamen Design]]></category>
		<category><![CDATA[Steven Levy]]></category>
		<category><![CDATA[sustainable design]]></category>
		<category><![CDATA[the dotted line world]]></category>
		<category><![CDATA[the internet of things]]></category>
		<category><![CDATA[the shape of alpha]]></category>
		<category><![CDATA[Thinglink project]]></category>
		<category><![CDATA[ThingM]]></category>
		<category><![CDATA[things as services]]></category>
		<category><![CDATA[Tim O'Reilly]]></category>
		<category><![CDATA[ubicomp]]></category>
		<category><![CDATA[ubicomp hardware]]></category>
		<category><![CDATA[urban green space]]></category>
		<category><![CDATA[Usman Haque]]></category>
		<category><![CDATA[Wattzon]]></category>
		<category><![CDATA[WineM]]></category>
		<category><![CDATA[wireless networks]]></category>

		<guid isPermaLink="false">http://www.ugotrade.com/?p=3191</guid>
		<description><![CDATA[ETech 2009 was all about making interesting and deeply socially effective technological interventions in the world. And dematerializing products into services seemed to be one of the most powerful concepts elaborated there to accomplish this.Â  Mike Kuniavsky in his presentation, &#8220;The dotted-line world, shadows, services, subscriptions,&#8221; noted: &#8220;There&#8217;s great opportunity here to create an ecology [&#8230;]]]></description>
				<content:encoded><![CDATA[<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/bicycleriderdatashadows.jpg"><img class="alignnone size-medium wp-image-3192" title="bicycleriderdatashadows" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/bicycleriderdatashadows-300x230.jpg" alt="bicycleriderdatashadows" width="300" height="230" /></a></p>
<p><a href="http://en.oreilly.com/et2009" target="_blank">ETech 2009</a> was all about making interesting and deeply socially effective technological interventions in the world. And dematerializing products into services seemed to be one of the most powerful concepts elaborated there to accomplish this. Mike Kuniavsky in his presentation, <a href="http://en.oreilly.com/et2009/public/schedule/speaker/1947" target="_blank"><strong>&#8220;The dotted-line world, shadows, services, subscriptions,&#8221;</strong></a> noted:</p>
<p><strong>&#8220;There&#8217;s great opportunity here to create an ecology of services embodied as robust, valuable, exciting new tools with focused, limited functionality, tied together with item-level identification and wireless networks. Whole classes of things that can enrich our lives and bank accounts are now possible thanks to the way ubiquitous computing interweaves services and devices at an intimate, everyday level&#8230;.<br />
</strong><br />
<strong>We now have the technology to create whole new classes of tools for living in a way that is more useful and fun for individuals, more sustainable for society, and more profitable for companies. That way is to recognize the connectedness of all everyday things, and to build on it, rather than ignoring it.&#8221;</strong></p>
<p>The picture opening this post is from Mike&#8217;s presentation (see <a id="zuqd" title="Mike's blog" href="http://www.orangecone.com/archives/2009/03/etech_2009_the.html">Mike&#8217;s blog</a> for <a href="http://www.orangecone.com/tm_etech_2009_0.1.pdf">a PDF with all of the images and notes</a> (884 PDF), and the original presentation description).</p>
<p>An ecosystem using item-level identification, wireless networking, and data visualization is evolving that links everyday objects to information about those objects &#8211; what Kuniavsky calls their &#8220;information shadow.&#8221; Because every object can be uniquely identified and that identification can be associated with a cluster of metadata, it &#8220;exists simultaneously in the physical world and in the world of data.&#8221;</p>
<p>Mike mentioned Tom Coates&#8217; <a href="http://www.plasticbag.org/archives/2005/04/the_age_of_pointatthings/" target="_blank">&#8220;Age of Point-At Things&#8221;</a> blog post to say that although Tom was talking about TV listings data, the same ideas can be applied to anything that&#8217;s uniquely identified. Also, Mike noted, he often references Ulla-Maaria Mutanen&#8217;s <a href=" http://aula.org/people/ulla/thinglink_white_paper.pdf" target="_blank">Thinglink project</a> and her observation about Amazon ASINs to explain this concept which is, of course, closely related to <a href=" http://en.wikipedia.org/wiki/Internet_of_things" target="_blank">the internet of things.</a></p>
<p>Until recently, Mike explained, accessing the information shadow was difficult. The world of objects and the world of information shadows were separated by the difficulty of getting at the information. But now, increasingly:</p>
<p><strong>&#8220;we can instantaneously see the world of information shadows as we&#8217;re interacting with the world of objects.&#8221; </strong></p>
<p>Mike is not only conceptualizing these ideas; his company with partner Tod E. Kurt, <a id="zh2z" title="Thingm" href="http://thingm.com/" target="_blank">Thing<span class="ru_CC6D50_bk">M,</span></a> is producing hardware that will enable this vision.</p>
<p><strong>&#8220;We&#8217;re a ubiquitous computing consumer electronics company, which sounds fancy, but we&#8217;re pretty small. We design, manufacture and sell ubicomp hardware.&#8221;</strong></p>
<p>ThingM may be small now but they are at the leading edge of a huge transformation. When asked, &#8220;How do you see the near-future city working with ubiquitous computing&#8230;&#8221; Adam Greenfield put it succinctly to Lalie Nicolas for <a href="http://www.lehub-agence.com/site.php">Le Hub</a>&#8217;s <a href="http://www.ludigo.net/index.php?rub=0">Ludigo</a> project:</p>
<p><strong>&#8220;I would go so far as to say that there will be no area or domain of urban activity that is not somehow disassembled and recomposed as a digital, networked, interactive process over the next few years. Objects, buildings and spaces will be reconceived as network resources; cars, subways and bicycles will be reimagined as on-demand mobility services; human communities are already well on the way to becoming self-conscious &#8216;social networks.&#8217;&#8221;</strong></p>
<p>For the rest of this short interview <a href="http://speedbird.wordpress.com/2009/03/16/ludigo-interview/" target="_blank">see Adam&#8217;s post</a>, and for my recent long interview with Adam <a href="http://www.ugotrade.com/2009/02/27/towards-a-newer-urbanism-talking-cities-networks-and-publics-with-adam-greenfield/" target="_blank">see here</a>.</p>
<h3>&#8220;&#8216;Almost everything in this room is in a landfill, but just doesn&#8217;t know it yet.&#8217;Â  This needs to change&#8221;</h3>
<p>(Tim O&#8217;Reilly responding on Twitter to a quote from <a href="http://twitter.com/AlexSteffen" target="_blank">@AlexSteffen</a>&#8216;s talk)</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/picture-5.png"><img class="alignnone size-medium wp-image-3194" title="picture-5" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/picture-5-300x241.png" alt="picture-5" width="300" height="241" /></a></p>
<p><em><span class="caps">Chart above from Jeremy Faludi&#8217;s presentation</span> <a class="attach" href="http://assets.en.oreilly.com/1/event/20/Priorities%20for%20a%20Greener%20World_%20If%20You%20Could%20Design%20Anything,%20What%20Should%20You%20Do_%20Presentation.pdf">Priorities for a Greener World: If You Could Design Anything, What Should You Do? Presentation</a> <span class="en_filetype">[PDF]</span></em> <span class="caps"> </span></p>
<p>Interconnecting themes at ETech,Â  <a id="nn8n" title="Inhabitat notes" href="http://www.inhabitat.com/2009/03/13/the-best-of-green-at-etech-2009/" target="_blank">Inhabitat noted,</a> &#8220;formed bridges between luminary speakers from a variety of backgrounds, as <a href="http://www.inhabitat.com/2006/10/26/worldchanging-the-book-is-out/">Alex Steffen</a>, <a href="http://www.inhabitat.com/2008/02/20/mary-lou-jepsen-at-greener-gadgets/">Mary Lou Jepsen</a>, <a href="http://www.faludidesign.com/">Jeremy Faludi</a>, and others reinforced the need to create repairable, open-source, <a href="http://www.inhabitat.com/2009/03/02/greener-gadgets-2009/">long lasting products</a>, reveal energy usage, and pursue forward-thinking strategies for a greener tomorrow.&#8221; But <a href="http://www.faludidesign.com/" target="_blank">Jeremy Faludi</a>, a sustainable design strategist and researcher<span class="caps">, </span><span class="caps">put the design challenge most directly:</span></p>
<p><span class="caps"> <strong>&#8220;</strong></span><strong>If you really care you need to dematerialize, turn products into services&#8230;&#8221; </strong></p>
<p>The idea of data shadows has been a part of the conversation in ubiquitous computing for a long time (since Marshall McLuhan perhaps?). But, at ETech 2009, it seemed to have come of age.</p>
<p>It came up again and again, in the need to dematerialize stuff that seemed to be part of every conversation, from Faludi&#8217;s comments on the amount of toxic mining waste created in the manufacture of one laptop, to Raffi Krikorian&#8217;s presentation of <a href="http://www.wattzon.com/" target="_blank">Wattzon&#8217;s</a> Embodied Energy Database (<a href="http://www.slideshare.net/raffikrikorian/wattzon-etech-2009" target="_blank">see slides here</a>), and <a id="lnyt" title="AMEE" href="http://www.amee.com/" target="_blank">AMEE</a> founder, Gavin Stark&#8217;s presentation, <a name="session7799"></a> (also see <a href="http://www.amee.com/blog/2009/03/19/energy-identity/">Gavin&#8217;s blog on Energy Identity here</a>).</p>
<p>The path to dematerializing the burdensome stuff that spells doom for our environment was not only presented conceptually and in creative solutions to specific problems (e.g. ThingM) at ETech. There were also hands on workshops (see <a href="http://www.ugotrade.com/2009/03/10/making-a-rfid-to-web-interface-and-lilypad-electronic-fashion-at-etech-2009/" target="_blank">my post on the two I attended</a>) from Maker gurus, who were also often to be found in the <a href="http://en.oreilly.com/et2009/public/schedule/detail/7281" target="_blank">Makershed</a>, providing opportunities to experiment with and prototype your own solutions (my hat is off to <a href="http://en.oreilly.com/et2009/public/content/about" target="_blank">Brady Forrest and the ETech committee</a> for pulling all this together).</p>
<h3>Connecting the dots&#8230;</h3>
<p>In the wake of an &#8220;econolypse,&#8221; (neologism pulled from Bruce Sterling&#8217;s twitter feed - @bruces) and on the eve of environmental catastrophe, we may well have, as Adam Greenfield <a href="http://www.ugotrade.com/2009/02/27/towards-a-newer-urbanism-talking-cities-networks-and-publics-with-adam-greenfield/" target="_blank">said to me here</a>, &#8220;seriously screwed the pooch.&#8221;</p>
<p>But that does not mean we should not do everything we can to try to save the day.</p>
<p>And in the serendipity peculiar to a conference, I was talking in the corridor to Gavin Starks of <a id="lnyt" title="AMEE" href="http://www.amee.com/" target="_blank">AMEE</a> who is working to create &#8220;the world&#8217;s energy meter&#8221; (on the right in the picture below), and Tony Mak from <a id="hc7p" title="O'Reilly AlphaTech Ventures" href="http://www.oatv.com/" target="_blank">O&#8217;Reilly AlphaTech Ventures</a> (to Gavin&#8217;s right), and Usman Haque of <a id="vp25" title="Pachube" href="http://www.pachube.com/">Pachube</a> (on Tony&#8217;s right) <a id="ihta" title="-see my earlier interview here" href="../../2009/01/28/pachube-patching-the-planet-interview-with-usman-haque/" target="_blank">- see my earlier interview with Usman here</a>), when Tim O&#8217;Reilly (far left) came by with Steven Levy of Wired (to Tim&#8217;s left). More on <a id="vp25" title="Pachube" href="http://www.pachube.com/">Pachube</a>, <a id="vwro" title="WattzOn" href="http://www.wattzon.com/" target="_blank">WattzOn</a>, <a id="lnyt" title="AMEE" href="http://www.amee.com/" target="_blank">AMEE</a> and <a href="http://www.pathintelligence.com/" target="_blank">Path Intelligence</a> and how these projects may connect in an upcoming post. Path Intelligence like AMEE is funded by the O&#8217;Reilly Venture group.</p>
<p>And no sooner had I snapped the photo below, Mike Kuniavsky arrived.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_170dxf8g9hg_b.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/timoreillytalkingtogavinstarkspost2.jpg"><img class="alignnone size-medium wp-image-3276" title="timoreillytalkingtogavinstarkspost2" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/timoreillytalkingtogavinstarkspost2-300x180.jpg" alt="timoreillytalkingtogavinstarkspost2" width="300" height="180" /></a></p>
<p>It seemed such an historic meeting, I asked everyone if I could switch my recorder on.</p>
<p>Tim had just been explaining how the concept of &#8220;data shadows&#8221; fit with something he&#8217;d learned from Gavin in a breakfast conversation. Gavin was talking about what AMEE is learning from smart meter data collected from 1.2 million homes in the UK. The energy signature from each device is so unique that you can tell not only the make and model of major appliances in each home, but its age. Gavin is worried about the privacy implications (as we all should be), but nonetheless, you can see the implications for business. Tim framed a vital question:<strong> What new businesses are growing in the data shadows?</strong></p>
<p><strong>Tim O&#8217;Reilly: </strong>Here&#8217;s the other member of this conversation I was trying to broker. This is Mike Kuniavsky, Gavin Starks. I was talking in your session about the point he made in his session&#8230;Steve Levy from Wired&#8230;</p>
<p><strong>Tish Shute:</strong> sorry, could you recap the point?</p>
<p><strong>Tim O&#8217;Reilly:</strong> &#8230;just the idea about data shadows, I just think it&#8217;s just such a powerful metaphor that every .. and you went on to explain that potential for subscriptions and so on&#8230;</p>
<p><strong>Mike Kuniavsky:</strong> Yes well what I was saying was that essentially every object that has an identifier associated with it, and there are a number of different kinds of identifiers out there, simultaneously lives in kind of the world of physical objects, and of the world of data. And the identifier links those two.</p>
<p><strong>Steven Levy:</strong> Just like Sterling&#8217;s Spimes?</p>
<p><strong>Mike Kuniavsky:</strong> A spime, it&#8217;s related obviously because we&#8217;re talking about RFIDs, but I&#8217;m really specifically talking about the fact that there is this information shadow that exists out there.</p>
<p><strong>Tim O&#8217;Reilly:</strong> I think we&#8217;ll find it lots of different ways, that was my excitement in connecting these points.</p>
<p><strong>Gavin Starks:</strong> My take on it is energy identity &#8211; that everything and everybody ends up with an energy identity that is the embodiment of their physical consumption.</p>
<p><strong>Mike Kuniavsky:</strong> And I would say, not to argue, I would say that energy comes as part of my information shadow. Like I carry this baggage of data along with me. And whatever data is potentially appropriate can be glommed on to that. And then that can then be carried to something else that can manipulate it. And also that&#8217;s true about every object. And now that we have RFID tracking of individual objects, it&#8217;s true about literally every object, not just every class of objects.</p>
<p><strong>Usman Haque:</strong> There&#8217;s a really beautiful story by Julio Cortazar where he uses the phrase &#8220;dressing the shadows&#8221; and it&#8217;s about the idea the shadow is not this sort of flat black thing but we can sort of put things onto it and slowly sort of grow it into something. It&#8217;s actually sort of more of a love story. But it&#8217;s a really interesting idea that the shadow&#8217;s not just the absence of but that it&#8217;s kind of the important part of it [for more see Usman&#8217;s paper, <a href="http://www.haque.co.uk/papers/dressingshadowsofarch.pdf" target="_blank">Dressing the shadows of architecture</a> &#8211; which is also available in spanish <a href="http://www.tintank.es/articulo_vestirsombras.html" target="_blank">here</a>.]</p>
<p><strong>Mike Kuniavsky:</strong> It&#8217;s the Peter Pan Barrie [JM Barrie, the author] thing. When Peter Pan&#8217;s shadow gets cut off and Wendy has to resew it back on. Potentially what all of these item level identification technologies are doing is they&#8217;re sewing the shadow back to the objects that they came from. And so you&#8217;re getting the information.</p>
<p><strong>Gavin Starks:</strong> It&#8217;s like the two and a half kilo Macbook which has a 460 kilo carbon shadow.</p>
<p><strong>Tim O&#8217;Reilly:</strong> It&#8217;s just a very powerful concept. That&#8217;s all I&#8217;m saying. I think it&#8217;s a metaphor that as soon as you have it, it makes it very easy to understand and to see a whole lot of things. So I&#8217;m very fond of it. Already it&#8217;s my new favorite toy. And it is great running into you all in the same place in the hall so I could introduce you all.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_173c5f8nvcm_b.png"><img class="alignnone size-medium wp-image-3203" title="dhj5mk2g_173c5f8nvcm_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_173c5f8nvcm_b-300x231.png" alt="dhj5mk2g_173c5f8nvcm_b" width="300" height="231" /></a></p>
<p><em>Image from Mike&#8217;s ETech presentation</em><br />
<strong><br />
&#8220;To create these new experiences we need to think about the design of both digital devices and infrastructures differently. We need step back from standalone tools and think about what service those tools deliver, then construct new avatars that fit better into people&#8217;s everyday experiences. We also need to step back from our infrastructural products and think about what services they enable. The electrical grid did not first start out as an abstract electrical grid in South Manhattan; it started as a way to deliver electric light. The electric bulb was not a standalone device, it was an avatar of Edison&#8217;s light delivery service and it was, first and foremost, designed to solve a specific problem for a large consumer market. Only then did the infrastructure it created expand to solve other kinds of problems.&#8221; Mike Kuniavsky&#8217;s ETech presentation, 2009</strong></p>
<p><strong><br />
</strong></p>
<h3><strong>Talking With Mike Kuniavsky</strong></h3>
<p><strong> </strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/elizabethandmikeballpost.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/elizabethgoodmanandmikekuniavskyballpost.jpg"><img class="alignnone size-medium wp-image-3280" title="elizabethgoodmanandmikekuniavskyballpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/elizabethgoodmanandmikekuniavskyballpost-300x199.jpg" alt="elizabethgoodmanandmikekuniavskyballpost" width="300" height="199" /></a><br />
</strong></p>
<p><em>Mike Kuniavsky and Elizabeth Goodman playing Bocci after ETech</em></p>
<p>The conversation with Mike began with a discussion about how to encourage participation. Usman Haque was present but he was called to lunch shortly.Â  The question of encouraging participation in deep social change was another recurring theme at ETech.Â  And, as Mike noted in his presentation:</p>
<p><strong>&#8220;The design of these avatars [Kuniavsky's term for objects that are closely tied to services] is quite challenging. They can&#8217;t really be as personalized. You just can&#8217;t pimp your City Carshare car. You only get one kind of bike in the Call a Bike program. That&#8217;s an important problem to solve. We love to have our stuff be ours. However, the same technologies can bring that, too. Our key fob can bring our whole world with us, and whether we sit down in a minivan, on a chair or in a plane we can bring our world with us. The thing can become our preferred colors, with our favorite music, and a picture of our loved ones on the dashboard, desk, or wall. Is it the same thing as owning it and leaving your stuff in it? No, but it&#8217;s closer.&#8221;<br />
</strong></p>
<p>Moreover:</p>
<p><strong>&#8230; objects have to change at a fundamental level. They have to be designed differently and they have to be described and discussed differently. The &#8220;owner&#8217;s&#8221; relationship to the object changes. The very idea of ownership changes. The solid object grows a dotted line that is filled-in as-needed, when-needed, and with the features that are needed. This is not the same thing as renting or co-ownership; its anytime/anywhere nature &#8211; enabled by the underlying technology &#8211; makes these new service objects fundamentally new (Kuniavsky&#8217;s presentation at ETech).<br />
</strong><br />
Elizabeth Goodman&#8217;s brilliant presentation at ETech, <a id="eag1" title="Designing for Urban Green Space" href="http://en.oreilly.com/et2009/public/schedule/detail/5562" target="_blank">Designing for Urban Green Space,</a> discussed a study of urban green space volunteership as a way &#8220;to rethink urban green space as a spectrum of places with varying types of ownership and management.&#8221;Â  Mike began the conversation by citing Elizabeth&#8217;s work.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_178gdn22ngf_b.png"><img class="alignnone size-medium wp-image-3208" title="dhj5mk2g_178gdn22ngf_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_178gdn22ngf_b-300x219.png" alt="dhj5mk2g_178gdn22ngf_b" width="300" height="219" /></a></p>
<p><em>Picture from <a href="http://en.oreilly.com/et2009/public/schedule/detail/5562" target="_blank">Elizabeth Goodman&#8217;s presentation</a>.</em></p>
<p><strong>Mike Kuniavsky:</strong> Well what I was saying [re participation], citing my wife Elizabeth Goodman&#8217;s work &#8230;She did all this work at Intel on people&#8217;s health practices and the issues [around] instrumenting people&#8217;s lives in order to produce behavioral change and the problems with that.</p>
<p>The question is how do you, sense to encourage, rather than sense to punish, when all the indicators are going down, like economic indicators, ecological indicators. They&#8217;re just not going to be going up perceptibly in a very long time. You don&#8217;t want to discourage people. The way to create behavioral change is not to essentially keep punishing people for the past. And so I don&#8217;t know if I have a good answer for this, but there is this entire kind of thinking about how do you encourage people to keep doing things even when the actual easy-to-measure indicators like the first order indicators are all pointing down. It&#8217;s the classic thing about how do you get people to stay fit even as they&#8217;re aging. They are never going to be as healthy as they were when they were 50 again.</p>
<p><strong>Usman Haque:</strong> I think you really hit on it when you said it&#8217;s not about the first order but about the second order measurements because that is exactly the kind of thing you want to change. It&#8217;s not that you want to stop it from falling because sometimes it&#8217;s impossible, you want to slow its rate.</p>
<p><strong>Mike Kuniavsky:</strong> Exactly. You want to slow the rate because at the bottom maybe you can start looking at the first order indicator. But you can&#8217;t look at the first order indicator while things are going to hell. And so you can just say it&#8217;s less bad than it would have been. And figuring out how to take the first order sensory data and turn it into this kind of second order data that might be helpful for actually creating behavioral change, because ultimately that&#8217;s what all of this is talking about.</p>
<p><strong>Tish Shute: </strong>This discussion about behavioral change wasn&#8217;t elaborated in your presentation was it?</p>
<p><strong>MK:</strong> I presented on essentiallyÂ  the combination of being able to identify individual objects and the idea of providing services as a way of creating things&#8230; the servicization of things &#8230;turningÂ  things into services is greatly accelerated by network technologies and the ability to track things and what leads this to the potential of having fundamentally different relationships to the devices in our lives and to things like ownership.</p>
<p>Like we now have the technology to create objects that are essentially representatives of services &#8211; things like City Car Share. What you own is not a thing but a possibility space of a thing. This fundamentally changes the design challenges. I am pretty convinced that how we should be using a lot of these technologies is to be shifting objects from ownership models to service models. We can do that but there are significant challenges with it. What is happening is that we have had the technology to do this for a while, but we haven&#8217;t been thinking about how to design these services. We haven&#8217;t been thinking about how to design what I call the avatars of these services &#8211; the physical objects that are the manifestation of them, like an ATM is the avatar of a banking service. It is useless without the banking service it is a representative of, essentially.</p>
<p>If you imagine this as an abstract idea, the ATM pokes out of [the service and into] a specific thing, but so do the bank tellers and so does the web site.</p>
<p><strong>TS:</strong> It seems like this is a major shift in how we conceptualize our economy, culture and even government &#8211; what are the avatars of government?</p>
<p><strong>MK:</strong> I think change in government is very hard. The example I have been using is the light bulb. Start by solving a problem. The interesting thing about lightbulbs is that it was not the invention of an incandescent filament that glowed in a vacuum&#8211;that had been invented long before&#8211;it was the system that it was part of. And that it was part of a much larger design project that was created specifically for delivering the service of light to lower Manhattan in 1884.</p>
<p><strong>TS:</strong> The grid hasn&#8217;t changed since Edison right &#8211; one of the earlier speakers mentioned this, that if Edison came back now he would say, &#8220;the grid is where I left it.&#8221;</p>
<p><strong>MK:</strong> My point is that he wasn&#8217;t creating an abstract electrical grid, he was solving a problem by creating a system that had as its avatar &#8211; as its end point this bulb. But the bulb is actually not the system, it is merely the end point.</p>
<p>As we are thinking about the capabilities of these technologies my argument is we have to be designing service systems along that model.</p>
<p><strong>TS:</strong> Web services?</p>
<p><strong>MK:</strong> Not just designing Web services. I am a big fan of thinking about digital tools outside the context of general purpose computing devices. I consider laptops general purpose and I consider phones general purpose. Yes originally the handset started out just as a phone but now it is essentially a computer terminal and now you have netbooks and netbooks are essentially this halfway point between a phone and a laptop because now you are going to get netbooks with 3G cards. Essentially it is already a big phone. Those are general purpose computing platforms, and I am not very interested in those.</p>
<p><strong>TS:</strong> What motivated you to make that move in your thinking?</p>
<p><strong>MK:</strong> I thought it was very narrow kind of thinking.Â  I thought that the costs of computing represented by the technologies in the middle of the Moore&#8217;s Law curve &#8211; rather than on the right &#8211; that the cost of that had dropped so far that it seemed we could be making all kinds of devices that had information processing as part of what it is without being general purpose computing platforms.</p>
<p>The iPod is a good example. The iPod is a computer and you can run Linux on it. It has more computing power than a computer did in the seventies. But who cares? The point of it is that you are using that power to solve a problem. You are applying the capabilities of information processing to solve specific problems. I have actually worked on infrastructural stuff. Twenty years ago I was associated with some early distributed computing stuff, then I did ten years of web site design stuff, but I am essentially done with that. Because what I am really interested in is creating new kinds of tools, new classes of tools that use information processing as the core of what makes them interesting and valuable.</p>
<p><strong>TS:</strong> Do these tools have to leverage networks to be useful?</p>
<p><strong>MK: </strong> No I think it is possible to use information processing in a small scale without having to be online all the time.Â  That is another one of the big toolboxes.Â  It creates a deep shift in the capabilities of what you can do if you have a network.Â  But the network can be really, really low bandwidth and simple for it still to be useful. You get these things that wake up once a month and spit out a packet with their telemetry.Â  And they are incredibly valuable but they are not what you would normally consider to be an always-on device.Â  It changes what they can do very fundamentally.Â  But it is not this thing that requires there to be blanket wifi.</p>
<p>You can have devices out there and this is the sort of a cliched example but the guy riding a bicycle around with a wifi access point in rural area where you have no infrastructure to do it otherwise.Â  But you have a little computer in every area and as he rides by they will exchange some data.</p>
<p><strong>You don&#8217;t have to have fibre at the curb to really, really make interesting deeply socially effective technological interventions in the world. </strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/aaaroncopetodekurtmikekuniavskypost.jpg"><img class="alignnone size-medium wp-image-3210" title="aaaroncopetodekurtmikekuniavskypost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/aaaroncopetodekurtmikekuniavskypost-300x199.jpg" alt="aaaroncopetodekurtmikekuniavskypost" width="300" height="199" /></a></strong></p>
<p><em><a id="d3_j" title="Aaron Starup Cope," href="http://en.oreilly.com/where2009/public/schedule/speaker/43824" target="_blank">Aaron Straup Cope,</a> Flickr, Tod E. Kurt, and Mike Kuniavsky &#8211; discussing <a id="rzgd" title="The Shape of Alpha" href="http://en.oreilly.com/where2009/public/schedule/detail/7212" target="_blank">The Shape of Alpha</a> (more on this upcoming!)<strong><br />
</strong></em><br />
<strong>MK:</strong> What we are trying to do is to do that.Â  We make a BlinkM &#8211; we make hardware &#8211; you saw my business partner Tod E. Kurt, he does all the heavy engineering and I am the guy who waves his hands around a lot and sends faxes.Â  We came out with our first product a year ago was a smart LED.Â  It is very simple RGB LED, it has a microcontroller and the microcontroller has firmware on it that kind of abstracts out the complexity of incorporating LEDs into a hobbyist product.Â  So you can do arbitrary colors, so it can do smooth fades between any two points in RGB space, you don&#8217;t need to know anything about Pulse Width Modulation or even microcontrollers.Â  You don&#8217;t have to know anything about anything except a little bit about electricity to use the thing. [In addition to <a id="hy-z" title="Blinkm" href="http://thingm.com/products/blinkm.html" target="_blank">BlinkM</a>, <a id="g8y3" title="Blinkm Maxm" href="http://thingm.com/products/blinkm-maxm.html" target="_blank">BlinkM MaxM</a> &#8211; the smart LED, Thingm has developed prototypes for other products such as the <a id="hqwc" title="Winem" href="http://thingm.com/products/winem.html" target="_blank">WineM</a> RFID wine rack and <a href="http://thingm.com/sketches/lovem.html" target="_blank">LoveM LCD chocolate box</a>.]</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_174cf26bcgn_b.png"><img class="alignnone size-medium wp-image-3211" title="dhj5mk2g_174cf26bcgn_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_174cf26bcgn_b-224x300.png" alt="dhj5mk2g_174cf26bcgn_b" width="224" height="300" /></a></p>
<p><strong>TS:</strong> I made a <a href="http://www.arduino.cc/en/Main/ArduinoBoardLilyPad" target="_blank">LilyPad</a> enabled Tshirt yesterday, if I used your LED what difference would that make to my Tshirt?</p>
<p><strong>MK:</strong> You could have the LED without changing the circuit at all, you could have it blink in any pattern, be any color, fade between colors. With our new one which is bigger than the old one, we actually have inputs. You could stick a wire on it or weave it into your shirt, and when you touch the wire it would change the behaviour of the LED.</p>
<p><strong>TS:</strong> Nice, you are giving me even more incentive to finish my T-Shirt. I noticed that Tim O&#8217;Reilly was connecting you to Gavin Starks, CEO of AMEE just now, and Usman Haque of Pachube. What is the connection between your work on ThingM and these projects?</p>
<p><strong>MK:</strong> I think what Gavin&#8217;s doing, as I understand it from Tim, is that he is essentially creating this new kind of sensor network that monitors electrical usage and allows you to feed it back. What that does is that it creates a new kind of data in the data shadow of your house, your refrigerator or whatever. It suddenly grows this extra lobe out in the data world that then has these new capabilities that can be attached to.</p>
<p><strong>TS: </strong>In terms of what you do with ThingM how are these ideas expressed through BlinkM?</p>
<p><strong>MK:</strong> We&#8217;re still building stuff that&#8217;s on a slightly lower level, components. Our corporate goal this year is to make our first product, a standalone solution to something. One of the easiest things you can do with our technology right now is you can replicate an Ambient Orb in about ten minutes. You could tie into their work. But you could also tie into it in a more subtle way where you could make lights smart so that when the net electricity cost goes above a certain threshold the lights know to dim or to turn off. And that can be dependent on how people use them. So rather than having a light you essentially associate a function or purpose with a light. Then the light knows based on electricity usage when its purpose has high enough priority to be on.</p>
<p>Not all of these ideas pour into our products, we can only afford to make LEDs.</p>
<p><strong>TS:</strong> Still it is amazing how ThingM really is a flagship for what is big and important shift in the way we can relate to stuff. And what about Usman&#8217;s Pachube. Where does ThingM fit with that?</p>
<p><strong>MK:</strong> I see Pachube less as a monolithic service than as a standard for device communication. Essentially it&#8217;s a proposal for interdevice communication, and potentially an easy way for people to define the way devices behave within their own personal ecology of smart devices. It&#8217;s something that&#8217;s in the early stages, and I think the barriers are not technological, the barriers are social. The barriers are understanding what this is for and why to use it. It&#8217;s not about will it work. It&#8217;ll work.</p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_177pc5g76g5_b.png"><img class="alignnone size-medium wp-image-3213" title="dhj5mk2g_177pc5g76g5_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_177pc5g76g5_b-300x230.png" alt="dhj5mk2g_177pc5g76g5_b" width="300" height="230" /></a></p>
<p><em>Image from Mike&#8217;s ETech presentation &#8211; original image source: Yottamark</em></p>
<p><strong>&#8220;You can, hypothetically, look at any object and know where it was made, what it is made of, what your friends think of it, how much it sells for on Ebay, how to cook it, how to fix it, how to recycle it, whatever. Any information that&#8217;s available about an object can now be available immediately and associated with that object.&#8221; </strong></p>
<p><strong><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_179fkxx3bg9_b.png"><img class="alignnone size-medium wp-image-3214" title="dhj5mk2g_179fkxx3bg9_b" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_179fkxx3bg9_b-300x231.png" alt="dhj5mk2g_179fkxx3bg9_b" width="300" height="231" /></a></strong></p>
<p><strong>&#8220;Connect it with location information and you have Location Based Services for anything. This is Cabspotting by Stamen. As Tom Coates says, once we have a handle, you can throw the data around.&#8221; (Kuniavsky)</strong></p>
<p>More to come on Stamen Design later! <a href="http://en.oreilly.com/public/schedule/speaker/2156">Tom Carden</a> (Stamen Design) ran a workshop at ETech 2008, <a id="bcqk" title="&quot;Live, Vast and Deep: Web-native Information Visualization,&quot;" href="http://en.oreilly.com/et2008/public/schedule/detail/1585" target="_blank">&#8220;Live, Vast and Deep: Web-native Information Visualization,&#8221;</a> outlining the process of taking a real data set from an online <span class="caps">API</span> (such as <a href="http://flickr.com/services/api">Flickr</a> or <a href="http://dopplr.pbwiki.com/">Dopplr</a>) and shaping it into an informative, beautiful, and useful interactive graphic presentation and this year, <a href="http://en.oreilly.com/et2009/public/schedule/speaker/3486">Michal Migurski</a> (Stamen Design),  	 	<a href="http://en.oreilly.com/et2009/public/schedule/speaker/40013">Shawn Allen</a> (Stamen Design) gave a workshop on <a id="nbzw" title="&quot;Maps from Scratch: Online Maps from the Ground Up.&quot;" href="http://en.oreilly.com/et2009/public/schedule/detail/5555" target="_blank">&#8220;Maps from Scratch: Online Maps from the Ground Up.&#8221;</a> <a id="k6oi" title="Eric Rodenbeck" href="http://en.oreilly.com/et2009/public/schedule/speaker/2160" target="_blank">Eric Rodenbeck</a>, founder and creative director of Stamen Design, presented on, <a id="q4up" title="&quot;New Data Visualization: Reaching Through Maps.&quot;" href="http://en.oreilly.com/et2009/public/schedule/detail/5438" target="_blank">&#8220;New Data Visualization: Reaching Through Maps.&#8221;</a></p>
<p><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/dhj5mk2g_180g6zstxc4_b.jpg"></a><a href="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/ercirodenbeckandshawnallenpost.jpg"><img class="alignnone size-medium wp-image-3279" title="ercirodenbeckandshawnallenpost" src="http://www.ugotrade.com/wordpress/wp-content/uploads/2009/03/ercirodenbeckandshawnallenpost-300x199.jpg" alt="ercirodenbeckandshawnallenpost" width="300" height="199" /></a></p>
<p><em>The picture above is of Eric Rodenbeck and Shawn Allen playing Bocci.</em></p>
]]></content:encoded>
			<wfw:commentRss>http://www.ugotrade.com/2009/03/18/dematerializing-the-world-shadows-subscriptions-and-things-as-services-talking-with-mike-kuniavsky-at-etech-2009/feed/</wfw:commentRss>
		<slash:comments>16</slash:comments>
		</item>
	</channel>
</rss>
